Author | SHA1 | Message | Date |
---|---|---|---|
Amélia Liao | 3a29725003 | dump | 2 years ago |
Amélia Liao | 8ba6920ff4 | parsing layout | 3 years ago |
@ -0,0 +1,98 @@ | |||
$code-bg: hsl(230,1%,98%); | |||
$code-fg: #ABB2BF; | |||
$code-red: #D65122; | |||
$code-red-br: #AE3B36; | |||
$code-green: #88B966; | |||
$code-yellow: #DEB468; | |||
$code-orange: #C58853; | |||
$code-blue: #519DEB; | |||
$code-pink: #C678DD; | |||
$code-cyan: #48A8B5; | |||
$code-white: #ABB2BF; | |||
$code-grey: #7F848E; | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 400; | |||
font-stretch: normal; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-regular.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-regular.ttf') format('truetype'); | |||
} | |||
body { | |||
margin: 0; | |||
background-color:$code-bg; | |||
color:#ABB2BF; | |||
} | |||
html { | |||
background-color: $code-bg; | |||
color:#ABB2BF; | |||
} | |||
pre.Agda { | |||
margin: 0; | |||
padding: 1em; | |||
background-color: $code-bg; | |||
color: $code-fg; | |||
} | |||
@keyframes highlight { | |||
0% { | |||
background-color: #F5DEB3; | |||
} | |||
100% { | |||
background-color: $code-bg; | |||
} | |||
} | |||
/* Aspects. */ | |||
.Agda { | |||
.Comment { color: $code-grey; } | |||
.Background { background-color: $code-bg; } | |||
.Markup { color: $code-fg; } | |||
.Keyword { color: $code-orange; } | |||
.String { color: $code-red; } | |||
.Number { color: $code-pink; } | |||
.Symbol { color: $code-fg; } | |||
.PrimitiveType { color: $code-blue; } | |||
.Pragma { color: $code-fg; } | |||
/* NameKinds. */ | |||
.Bound { color: $code-fg; } | |||
.Generalizable { color: $code-fg; } | |||
.InductiveConstructor { color: $code-green; } | |||
.CoinductiveConstructor { color: $code-green; } | |||
.Datatype { color: $code-blue; } | |||
.Field { color: #F570B7; } | |||
.Function { color: $code-blue; } | |||
.Module { color: $code-pink; } | |||
.Postulate { color: $code-blue; } | |||
.Primitive { color: $code-blue; } | |||
.Record { color: $code-blue; } | |||
/* OtherAspects. */ | |||
.UnsolvedMeta { color: $code-fg; background: yellow } | |||
.UnsolvedConstraint { color: $code-fg; background: yellow } | |||
.TerminationProblem { color: $code-fg; background: #FFA07A } | |||
.IncompletePattern { color: $code-fg; background: #F5DEB3 } | |||
.Error { color: red; text-decoration: underline } | |||
.TypeChecks { color: $code-fg; background: #ADD8E6 } | |||
.Deadcode { color: $code-fg; background: #808080 } | |||
.ShadowingInTelescope { color: $code-fg; background: #808080 } | |||
/* Standard attributes. */ | |||
a { text-decoration: none } | |||
a[href]:hover { | |||
text-decoration: 2px #B4EEB4 underline dotted; | |||
} | |||
a[href]:target { | |||
animation: highlight 2.5s; | |||
} | |||
background-color: #282C34; | |||
font-family: 'Iosevka', 'Fantasque Sans Mono', 'Roboto Mono', monospace; | |||
font-weight: 400; | |||
font-size: 16pt; | |||
} |
@ -1,45 +0,0 @@ | |||
code, div.sourceCode { | |||
font-family: Fantasque Sans Mono, Fira Mono, monospace; | |||
font-size: 15pt; | |||
} | |||
code.inline { | |||
background-color: rgb(250,250,250); | |||
/* border: 1px solid rgb(200,200,200); */ | |||
} | |||
table.sourceCode, tr.sourceCode, td.lineNumbers, td.sourceCode { | |||
margin: 0; padding: 0; vertical-align: baseline; border: none; | |||
} | |||
table.sourceCode { | |||
width: 100%; line-height: 100%; | |||
} | |||
td.lineNumbers { text-align: right; padding-right: 4px; padding-left: 4px; color: #aaaaaa; border-right: 1px solid #aaaaaa; } | |||
td.sourceCode { padding-left: 5px; } | |||
code.kw, span.kw { color: #af005f; } /* Keyword */ | |||
code.dt, span.dt { color: #5f5faf; } /* DataType */ | |||
code.dv, span.dv { color: #268bd2; } /* DecVal */ | |||
code.bn, span.bn { color: #268bd2; } /* BaseN */ | |||
code.fl, span.fl { color: #268bd2; } /* Float */ | |||
code.ch, span.ch { color: #cb4b16; } /* Char */ | |||
code.st, span.st { color: #cb4b16; } /* String */ | |||
code.co, span.co { color: #8a8a8a; } /* Comment */ | |||
code.ot, span.ot { color: #007020; } /* Other */ | |||
code.al, span.al { color: #ff0000; } /* Alert */ | |||
code.fu, span.fu { color: #666666; } /* Function */ | |||
code.er, span.er { color: #ff0000; } /* Error */ | |||
code.wa, span.wa { color: #60a0b0; } /* Warning */ | |||
code.cn, span.cn { color: #880000; } /* Constant */ | |||
code.sc, span.sc { color: #4070a0; } /* SpecialChar */ | |||
code.vs, span.vs { color: #4070a0; } /* VerbatimString */ | |||
code.ss, span.ss { color: #2aa198; } /* SpecialString */ | |||
code.va, span.va { color: #073642; } /* Variable */ | |||
code.cf, span.cf { color: #007020; } /* ControlFlow */ | |||
code.op, span.op { color: #d33682; } /* Operator */ | |||
code.pp, span.pp { color: #bc7a00; } /* Preprocessor */ | |||
code.at, span.at { color: #7d9029; } /* Attribute */ | |||
code.do, span.do { color: #ba2121; } /* Documentation */ | |||
code.an, span.an { color: #60a0b0; } /* Annotation */ | |||
code.cv, span.cv { color: #60a0b0; } /* CommentVar */ |
@ -1,31 +0,0 @@ | |||
$black: #282c34; | |||
$white: #abb2bf; | |||
$light_red: #e06c75; | |||
$dark_red: #be5046; | |||
$green: #98c379; | |||
$light_yellow: #e5c07b; | |||
$dark_yellow: #d19a66; | |||
$blue: #61afef; | |||
$magenta: #c678dd; | |||
$cyan: #56b6c2; | |||
$gutter_grey: #4b5263; | |||
$comment_grey: #5c6370; | |||
$orange: #ffac5f; | |||
$blonde: #f5ddbc; | |||
$light-purple: #f2f1f8; | |||
$purple: #7060eb; | |||
$yugo: #ea8472; | |||
$code-background: $black; | |||
$code-language-background: lighten($black, 5%); | |||
$code-language-color: $white; | |||
$code-fg: $white; | |||
$code-kw: $light_red; | |||
$code-dt: $dark_yellow; | |||
$code-ot: $code-fg; | |||
$code-co: $comment_grey; | |||
// comment |
@ -1,13 +0,0 @@ | |||
@font-face { | |||
font-family: "Computer Modern"; | |||
src: url('/static/cmunorm.woff'); | |||
} | |||
.math { | |||
font-family: Computer Modern, serif; | |||
} | |||
.math .display { | |||
font-size: 18px; | |||
text-align: center; | |||
} |
@ -1,596 +1,524 @@ | |||
$purple: #4834d4; | |||
$orange: #ffbe76; | |||
$blonde: #f5ddbc; | |||
$light-purple: #faf0fa; | |||
$yugo: #ea8472; | |||
$pink_glamour: #ff7979; | |||
$carmine_pink: #eb4d4b; | |||
@import "vars.scss"; | |||
$header: $orange; | |||
$header-height: 50px; | |||
$max-width: 80ch; | |||
.mathpar, .math-paragraph { | |||
@mixin center-that-bitch { | |||
display: flex; | |||
flex-direction: row; | |||
flex-wrap: wrap; | |||
justify-content: space-around; | |||
flex-direction: column; | |||
align-items: center; | |||
> figure { | |||
width: auto; | |||
} | |||
} | |||
a#mastodon { | |||
display: none; | |||
} | |||
html, body { | |||
html { | |||
min-height: 100%; | |||
height: 100%; | |||
margin: 0; | |||
max-width: 100%; | |||
margin: 0; | |||
background-color: white; | |||
font-family: -apple-system,BlinkMacSystemFont,"Segoe UI",Roboto,Oxygen-Sans,Ubuntu,Cantarell,"Helvetica Neue",sans-serif; | |||
overflow-x: clip; | |||
} | |||
body { | |||
display: flex; | |||
flex-direction: column; | |||
font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif; | |||
counter-reset: theorem figure; | |||
} | |||
width: 100%; | |||
div#header { | |||
background-color: $header; | |||
height: $header-height; | |||
@include center-that-bitch; | |||
margin: 0; | |||
color: $bluegray-900; | |||
font-size: $font-size; | |||
overflow-x: clip; | |||
} | |||
body > header { | |||
background-color: $purple-600; | |||
display: flex; | |||
flex-direction: row; | |||
align-items: stretch; | |||
height: $nav-height; | |||
width: 100%; | |||
z-index: 999; | |||
position: fixed; | |||
justify-content: space-between; | |||
flex-wrap: nowrap; | |||
overflow-x: auto; | |||
align-items: center; | |||
div#logo { | |||
margin-left: .5em; | |||
line-height: $header-height; | |||
padding-left: 1em; | |||
padding-right: 1em; | |||
padding-left: .5em; | |||
padding-right: .5em; | |||
font-size: 18pt; | |||
border-bottom: 3px solid $purple-700; | |||
box-sizing: border-box; | |||
a { | |||
color: black; | |||
text-decoration: none; | |||
} | |||
div, nav > a { | |||
height: $nav-height; | |||
padding-left: 0.3em; | |||
padding-right: 0.3em; | |||
transition: background-color .2s ease-in-out; | |||
display: flex; | |||
align-items: center; | |||
} | |||
div#logo:hover { | |||
background-color: darken($header, 10%); | |||
div:hover, nav > a:hover { | |||
background-color: $purple-500; | |||
transition: background-color 0.3s ease-in-out; | |||
} | |||
div#navigation { | |||
margin-right: .5em; | |||
a { | |||
color: white; | |||
font-size: $font-size * 1.2; | |||
text-decoration: none; | |||
} | |||
nav { | |||
display: flex; | |||
flex-direction: row; | |||
align-items: stretch; | |||
align-self: flex-end; | |||
justify-content: flex-end; | |||
align-items: center; | |||
gap: 0.5em; | |||
} | |||
} | |||
line-height: $header-height; | |||
@mixin left-bordered-block($color) { | |||
padding-left: 1em; | |||
padding-top: 0.2em; | |||
padding-bottom: 0.2em; | |||
a { | |||
color: black; | |||
text-decoration: none; | |||
border-left: 5px dashed $color; | |||
} | |||
margin-left: .5em; | |||
margin-right: .5em; | |||
} | |||
@mixin material { | |||
padding: 1em; | |||
div.nav-button { | |||
height: 100%; | |||
transition: background-color .2s ease-in-out; | |||
} | |||
margin-top: 1em; | |||
margin-bottom: 1em; | |||
div.nav-button:hover { | |||
background-color: darken($header, 10%); | |||
} | |||
} | |||
box-shadow: 2px 2px 6px black; | |||
border-radius: 10px; | |||
} | |||
div#content, div.post-list-synopsys { | |||
max-width: $max-width + 4ch; | |||
main { | |||
max-width: 100ch; | |||
width: 100%; | |||
margin: 0px auto 0px auto; | |||
flex: 1 0 auto; | |||
padding: 2ch; | |||
padding-top: $nav-height; | |||
span.katex, span.together { | |||
display: inline-block; | |||
} | |||
box-sizing: border-box; | |||
span.together { | |||
white-space: nowrap; | |||
div#title h2 { | |||
display: none; | |||
} | |||
p { | |||
code { | |||
display: inline-block; | |||
} | |||
span.qed { | |||
float: right; | |||
} | |||
span.theorem ::after { | |||
counter-increment: theorem; | |||
content: " " counter(theorem); | |||
} | |||
span.theorem, span.paragraph-marker { | |||
font-style: italic; | |||
div#post-toc-container { | |||
aside#toc { | |||
display: none; | |||
} | |||
span.word { | |||
display: inline-block; | |||
article { | |||
grid-column: 2; | |||
width: 100%; | |||
line-height: 1.5; | |||
} | |||
} | |||
p.image { | |||
text-align: center !important; | |||
} | |||
h1 { | |||
max-width: 120ch; | |||
} | |||
> article { | |||
> blockquote { | |||
width: 85%; | |||
margin: auto; | |||
border-left: 12px solid $purple; | |||
border-right: 3px dashed $purple; | |||
border-top: 3px dashed $purple; | |||
border-bottom: 3px dashed $purple; | |||
padding: 2ch; | |||
} | |||
> div.code-container { | |||
// width: 80%; | |||
// margin: auto; | |||
pre { | |||
padding: 0px 1em; | |||
width: 80%; | |||
margin: auto; | |||
} | |||
div#post-info { | |||
font-style: italic; | |||
line-height: 1.2; | |||
> span { | |||
display: inline; | |||
} | |||
} | |||
@include left-bordered-block($bluegray-500); | |||
} | |||
} | |||
div.code-container { | |||
padding-top: 5px; | |||
background-color: $light-purple; | |||
border-radius: 5px; | |||
border: 1px solid darken($light-purple, 10%); | |||
overflow-x: auto; | |||
> div.code-tag { | |||
background-color: darken($light-purple, 10%); | |||
overflow: hidden; | |||
line-height: 80%; | |||
font-size: 12pt; | |||
margin-top: 5px; | |||
width: 100%; | |||
span { | |||
padding: 0.5em 1em 0.25em 1em; | |||
border-top-left-radius: 10px; | |||
div.warning { | |||
@include material; | |||
background-color: $red-200; | |||
} | |||
box-shadow: 1px 1px darken($light-purple, 20%); | |||
// display: none; | |||
figure.wraparound { | |||
float: right; | |||
width: auto; | |||
float: right; | |||
} | |||
} | |||
margin-left: 2em; | |||
} | |||
pre { | |||
overflow-x: auto; | |||
} | |||
figure { | |||
overflow-x: auto; | |||
overflow-y: clip; | |||
width: 100%; | |||
margin: auto; | |||
div.sourceCode { | |||
padding: 0px .5em; | |||
} | |||
@include center-that-bitch; | |||
figcaption { | |||
margin-top: 0.3em; | |||
display: inline-block; | |||
text-align: center; | |||
} | |||
div.code-container.custom-tag { | |||
> span::after { | |||
display: none; | |||
} | |||
p { | |||
margin: 0; | |||
} | |||
} | |||
div.code-container.continues { | |||
> span { | |||
display: none; | |||
ol, ul { | |||
padding-left: 1.2em; | |||
li { | |||
margin-top: 5px; | |||
margin-bottom: 5px; | |||
p { | |||
margin-top: 5px; | |||
margin-bottom: 5px; | |||
} | |||
} | |||
} | |||
div.warning { | |||
&:before { | |||
content: "Heads up!"; | |||
display: block; | |||
font-size: 16pt; | |||
} | |||
margin: 16px auto; | |||
width: 85%; | |||
border-left: 12px solid $carmine_pink; | |||
border-right: 3px dashed $carmine_pink; | |||
border-top: 3px dashed $carmine_pink; | |||
border-bottom: 3px dashed $carmine_pink; | |||
padding: 2ch; | |||
&>:first-child { | |||
margin-top: 0.5em; | |||
} | |||
&>:last-child { | |||
margin-bottom: 0px; | |||
} | |||
.katex-display { | |||
> span.katex { | |||
white-space: normal; | |||
} | |||
} | |||
div.text-image { | |||
display: flex; | |||
margin-bottom: -18px; | |||
div.mathpar { | |||
display: flex; | |||
flex-flow: row wrap; | |||
justify-content: center; | |||
align-items: center; | |||
> figure { | |||
min-width: 25px; | |||
padding-left: 2em; | |||
} | |||
} | |||
gap: 1em; | |||
* { | |||
max-width: 100%; | |||
> figure { | |||
width: auto; | |||
max-width: 33%; | |||
} | |||
} | |||
details { | |||
summary { | |||
margin-left: 1em; | |||
font-size: 14pt; | |||
padding: .2em 1em; | |||
} | |||
border-radius: 5px; | |||
border: 1px solid $purple; | |||
padding: .2em 1em; | |||
background-color: white; | |||
margin-top: 0.5em; | |||
margin-bottom: 0.5em; | |||
transition: font-size 0.3s ease-in-out, background-color 0.3s ease-in-out; | |||
transform: rotate3d(0, 0, 0, 0); | |||
} | |||
div.columns { | |||
blockquote, details.blockquote { | |||
padding-right: 1em; | |||
padding-left: 1em; | |||
padding-top: 0.2em; | |||
padding-bottom: 0.2em; | |||
details[open] { | |||
font-size: 14pt; | |||
border: 0; | |||
} | |||
} | |||
.special-thanks { | |||
display: flex; | |||
flex-direction: column; | |||
align-items: center; | |||
justify-content: space-around; | |||
p { | |||
font-size: 21pt; | |||
margin-top: .5em; | |||
margin-bottom: 0px; | |||
} | |||
code, pre, .sourceCode { | |||
font-size: $font-size * 1.2; | |||
font-family: 'Iosevka', 'Fantasque Sans Mono', Consolas, "Andale Mono WT", "Andale Mono", "Lucida Console", "Lucida Sans Typewriter", "DejaVu Sans Mono", "Bitstream Vera Sans Mono", "Liberation Mono", "Nimbus Mono L", Monaco, "Courier New", Courier, monospace; | |||
font-weight: 500; | |||
} | |||
ul { | |||
list-style: none; | |||
li { | |||
margin: .5em 0px; | |||
border-radius: 10px; | |||
border: 1px solid $purple; | |||
background-color: $light-purple; | |||
padding: 1em; | |||
a { | |||
text-decoration: underline; | |||
} | |||
div.sourceCode, pre { | |||
background-color: $code-bg; | |||
color: $code-fg; | |||
flex-grow: 0; | |||
transition: all 0.2s ease-in-out; | |||
} | |||
@include material; | |||
li:hover { | |||
border-radius: 5px; | |||
transform: scale(1.01); | |||
background-color: lighten($light-purple, 5%); | |||
} | |||
} | |||
overflow-x: auto; | |||
line-height: 1.2; | |||
code { | |||
display: block; | |||
} | |||
.eqn-list { | |||
list-style: none; | |||
direction: rtl; | |||
counter-reset: equation; | |||
li { | |||
counter-increment: equation; | |||
> pre { | |||
padding: unset; | |||
margin-top: unset; | |||
margin-bottom: unset; | |||
box-shadow: unset; | |||
span.katex-display { | |||
margin: 2px; | |||
} | |||
* { | |||
direction: ltr; | |||
} | |||
} | |||
margin: 0; | |||
li::marker { | |||
content: "(" counter(equation) ")"; | |||
font-family: KaTeX_Main, Times New Roman, serif; | |||
} | |||
} | |||
font-size: 14pt; | |||
line-height: 1.4; | |||
span.katex-display { | |||
overflow-x: auto; | |||
overflow-y: clip; | |||
} | |||
} | |||
.footnote-back { | |||
margin-left: 0.5em; | |||
p > code { | |||
white-space: nowrap; | |||
} | |||
div.info, span#reading-length { | |||
padding-left: 1em; | |||
font-style: italic; | |||
} | |||
blockquote, details.blockquote { | |||
@include material; | |||
span#reading-length::before { | |||
content: "Word count: " | |||
} | |||
background-color: $purple-100; | |||
div#footer { | |||
display: flex; | |||
align-items: center; | |||
justify-content: space-between; | |||
margin-left: 0; | |||
margin-right: 0; | |||
padding-left: 1em; | |||
padding-right: 1em; | |||
background-color: $light-purple; | |||
height: 50px; | |||
h2 { | |||
margin-top: 0; | |||
} | |||
} | |||
.definition { | |||
text-decoration: dotted underline; | |||
table { | |||
width: 70%; | |||
margin: auto; | |||
border-collapse: collapse; | |||
td, th { | |||
text-align: center; | |||
padding: 0px 1em 0px 1em; | |||
border: 2px solid $purple-400; | |||
} | |||
} | |||
.post-list { | |||
list-style: none; | |||
padding: 0px; | |||
ul#post-list { | |||
list-style-type: none; | |||
display: flex; | |||
flex-direction: column; | |||
width: 90%; | |||
margin: auto; | |||
.post-list-item { | |||
@include left-bordered-block($yellow-500); | |||
@include material; | |||
div.post-list-item { | |||
margin-top: .2em; | |||
margin-bottom: .2em; | |||
margin: 0; | |||
padding: 1em; | |||
background-color: $yellow-50; | |||
border-radius: 10px; | |||
background-color: $light-purple; | |||
.post-list-header { | |||
margin-top: 0.2em; | |||
div.post-list-header { | |||
display: flex; | |||
justify-content: space-between; | |||
flex-wrap: wrap; | |||
align-items: flex-end; | |||
line-height: 14pt; | |||
font-style: italic; | |||
font-size: 10pt; | |||
a { | |||
font-size: 14pt; | |||
padding-right: 2em; | |||
font-style: normal; | |||
color: $bluegray-800; | |||
} | |||
} | |||
font-size: 11pt; | |||
} | |||
} | |||
table { | |||
margin: auto; | |||
border-collapse: collapse; | |||
div.contact-list { | |||
display: flex; | |||
justify-content: space-evenly; | |||
align-items: stretch; | |||
gap: 3em; | |||
td, th { | |||
border: 1px solid $purple; | |||
text-align: center; | |||
padding: 0px 1em 0px 1em; | |||
div.contact-card { | |||
background-color: $purple-200; | |||
@include material; | |||
width: 33%; | |||
max-width: 33%; | |||
flex-grow: 1; | |||
> * { | |||
vertical-align: middle; | |||
p { | |||
margin: 0; | |||
} | |||
} | |||
td.image { | |||
padding: 0px; | |||
img { | |||
margin-top: auto; | |||
div.contact-header { | |||
// I really hate Pandoc sometimes | |||
display: flex; | |||
align-items: center; | |||
gap: 1em; | |||
margin-bottom: 10px; | |||
img { | |||
height: 48px; | |||
clip-path: url(#squircle); | |||
} | |||
span.username { | |||
font-size: 16pt; | |||
} | |||
} | |||
} | |||
} | |||
figure { | |||
width: 100%; | |||
margin: auto; | |||
display: flex; | |||
flex-direction: column; | |||
align-items: center; | |||
justify-content: space-around; | |||
div { | |||
width: 100%; | |||
overflow-x: auto; | |||
@media only screen and (max-width: 450px) { | |||
body > header { | |||
div#logo { | |||
width: 100%; | |||
display: flex; | |||
flex-direction: row; | |||
justify-content: center; | |||
} | |||
nav { | |||
display: none; | |||
} | |||
} | |||
} | |||
> p { | |||
margin-bottom: 0px; | |||
@media only screen and (min-width: 1500px) { | |||
.narrow-only { | |||
display: none !important; | |||
} | |||
figcaption { | |||
font-size: 14pt; | |||
display: inline-block; | |||
main { | |||
max-width: 100%; | |||
> div#title { | |||
font-size: 15pt; | |||
h1, h2 { | |||
margin: 0; | |||
} | |||
h2 { | |||
font-style: italic; | |||
font-weight: normal; | |||
display: block; | |||
z-index: 1; | |||
} | |||
> p { | |||
margin-top: 0px; | |||
margin-bottom: 0px; | |||
margin-top: 0.5em; | |||
margin-bottom: 1em; | |||
@include center-that-bitch; | |||
} | |||
} | |||
// figcaption::before { | |||
// counter-increment: figure; | |||
// content: "Figure " counter(figure) ". "; | |||
// display: inline; | |||
// } | |||
} | |||
div#post-toc-container { | |||
display: grid; | |||
grid-template-columns: 0.5fr 2fr 0.5fr; | |||
gap: 1em; | |||
@media only screen and (max-width: $max-width) { | |||
div#content { | |||
margin-left: 1px; | |||
margin-right: 1px; | |||
aside#toc { | |||
display: block !important; | |||
padding: 1ch; | |||
h3 { @include center-that-bitch; } | |||
div.text-image { | |||
display: block; | |||
> figure { | |||
padding-left: 0px; | |||
} | |||
} | |||
div#toc-container { | |||
overflow-x: hidden; | |||
width: 100%; | |||
position: sticky; | |||
top: 2em; | |||
figure figcaption { | |||
max-width: 85%; | |||
margin: auto; | |||
overflow-y: auto; | |||
max-height: 90vh; | |||
bottom: 2em; | |||
ul { | |||
border-left: 2px solid $bluegray-400; | |||
list-style-type: none; | |||
padding-left: 1em; | |||
a { | |||
text-decoration: none; | |||
} | |||
a:hover { | |||
text-decoration: underline; | |||
} | |||
} | |||
} | |||
p { | |||
font-style: italic; | |||
} | |||
} | |||
ul { | |||
padding-left: 20px; | |||
article { | |||
max-width: 100ch; | |||
margin-top: -100px; | |||
margin: auto; | |||
} | |||
} | |||
p { | |||
hyphens: auto; | |||
div.columns { | |||
display: grid; | |||
grid-template-columns: 1fr 1fr; | |||
gap: 1em; | |||
span.katex { | |||
display: inline; | |||
} | |||
} | |||
} | |||
} | |||
.mathpar { | |||
overflow-x: auto; | |||
justify-content: space-evenly; | |||
#index { | |||
padding-top: 4em; | |||
> * { | |||
margin-left: .2em; | |||
margin-right: .2em; | |||
flex-shrink: 0; | |||
flex-grow: 0; | |||
a.ico-left { | |||
img { | |||
clip-path: url(#squircle); | |||
width: 96px; | |||
height: 96px; | |||
} | |||
float: left; | |||
margin-right: 1em; | |||
width: 96px; | |||
height: 96px; | |||
} | |||
header { | |||
nav div { | |||
&>:nth-child(3) { | |||
display: none; | |||
} | |||
a.ico-right { | |||
img { | |||
clip-path: url(#squircle); | |||
width: 96px; | |||
height: 96px; | |||
} | |||
float: right; | |||
margin-left: 1em; | |||
width: 96px; | |||
height: 96px; | |||
} | |||
} | |||
// Contact page | |||
.contact-list { | |||
display: flex; | |||
flex-direction: row; | |||
flex-wrap: wrap; | |||
justify-content: space-around; | |||
align-items: center; | |||
.contact-card { | |||
margin: 0px .5em; | |||
border: 1px solid $purple; | |||
border-radius: 10px; | |||
background-color: $blonde; | |||
padding: 0px 1em; | |||
div#social { | |||
display: flex; | |||
flex-direction: row; | |||
justify-content: center; | |||
width: 100%; | |||
gap: 8px; | |||
.username, .username * { | |||
font-size: 21pt; | |||
color: $purple; | |||
img { | |||
width: 48px; | |||
height: 48px; | |||
clip-path: url(#squircle); | |||
transition: width 0.25s, height 0.25s; | |||
&:hover { | |||
width: 54px; | |||
height: 54px; | |||
} | |||
} | |||
p { | |||
display: flex; | |||
align-items: center; | |||
justify-content: space-evenly; | |||
max-width: 40ch !important; | |||
a { | |||
filter: drop-shadow(2px 2px 3px rgba(50, 50, 0, 0.5)); | |||
height: 54px; | |||
} | |||
transition: all 0.2s ease-in-out; | |||
} | |||
} | |||
.contact-card:hover { | |||
margin: 0px .55em; | |||
background-color: darken($blonde, 10%); | |||
border-radius: 5px; | |||
transform: scale(1.01); | |||
@media only screen and (min-width: 1500px) { | |||
#index { | |||
display: grid; | |||
grid-template-columns: 0.20fr 0.75fr 0.20fr 1fr 0.20fr; | |||
} | |||
} | |||
span.math.inline { | |||
display: inline-block; | |||
} | |||
@font-face { | |||
font-family: 'Fantasque Sans Mono'; | |||
src: url('fonts/FantasqueSansMono-Regular.woff2') format('woff2'); | |||
font-weight: 400; | |||
font-style: normal; | |||
details { | |||
margin-top: 1em; | |||
margin-bottom: 1em; | |||
} | |||
// Styles for code | |||
code.kw, span.kw { color: $code-pink; } /* Keyword */ | |||
code.dt, span.dt { color: $code-blue; } /* DataType */ | |||
code.dv, span.dv { color: $code-orange; } /* DecVal */ | |||
code.bn, span.bn { color: $code-orange; } /* BaseN */ | |||
code.fl, span.fl { color: $code-orange; } /* Float */ | |||
code.ch, span.ch { color: $code-green; } /* Char */ | |||
code.st, span.st { color: $code-green; } /* String */ | |||
code.co, span.co { color: $code-grey; } /* Comment */ | |||
code.ot, span.ot { color: $code-green; } /* Other */ | |||
code.al, span.al { color: #ff0000; } /* Alert */ | |||
code.fu, span.fu { color: $code-fg; } /* Function */ | |||
code.er, span.er { color: #ff0000; } /* Error */ | |||
code.wa, span.wa { color: #60a0b0; } /* Warning */ | |||
code.cn, span.cn { color: $code-orange; } /* Constant */ | |||
code.sc, span.sc { color: $code-yellow; } /* SpecialChar */ | |||
code.vs, span.vs { color: $code-blue; } /* VerbatimString */ | |||
code.ss, span.ss { color: $code-green; } /* SpecialString */ | |||
code.va, span.va { color: $code-fg; } /* Variable */ | |||
code.cf, span.cf { color: $code-pink; } /* ControlFlow */ | |||
code.op, span.op { color: $code-green; } /* Operator */ | |||
code.pp, span.pp { color: $code-orange; } /* Preprocessor */ | |||
code.at, span.at { color: $code-green; } /* Attribute */ | |||
code.do, span.do { color: $code-red; } /* Documentation */ | |||
code.an, span.an { color: $code-red; } /* Annotation */ | |||
code.cv, span.cv { color: $code-red; } /* CommentVar */ |
@ -0,0 +1,419 @@ | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 400; | |||
font-stretch: normal; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-regular.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-regular.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 400; | |||
font-stretch: expanded; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-extended.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extended.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 400; | |||
font-stretch: normal; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-oblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-oblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 400; | |||
font-stretch: normal; | |||
src: url('/static/woff2/iosevk-abbie-oblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-oblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 400; | |||
font-stretch: expanded; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-extendedoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 400; | |||
font-stretch: expanded; | |||
src: url('/static/woff2/iosevk-abbie-extendedoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 400; | |||
font-stretch: normal; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-italic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-italic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 400; | |||
font-stretch: expanded; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-extendeditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendeditalic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 500; | |||
font-stretch: normal; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-medium.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-medium.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 500; | |||
font-stretch: expanded; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-extendedmedium.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedmedium.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 500; | |||
font-stretch: normal; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-mediumoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-mediumoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 500; | |||
font-stretch: normal; | |||
src: url('/static/woff2/iosevk-abbie-mediumoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-mediumoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 500; | |||
font-stretch: expanded; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-extendedmediumoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedmediumoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 500; | |||
font-stretch: expanded; | |||
src: url('/static/woff2/iosevk-abbie-extendedmediumoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedmediumoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 500; | |||
font-stretch: normal; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-mediumitalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-mediumitalic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 500; | |||
font-stretch: expanded; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-extendedmediumitalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedmediumitalic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 600; | |||
font-stretch: normal; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-semibold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-semibold.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 600; | |||
font-stretch: expanded; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-extendedsemibold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedsemibold.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 600; | |||
font-stretch: normal; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-semiboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-semiboldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 600; | |||
font-stretch: normal; | |||
src: url('/static/woff2/iosevk-abbie-semiboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-semiboldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 600; | |||
font-stretch: expanded; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-extendedsemiboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedsemiboldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 600; | |||
font-stretch: expanded; | |||
src: url('/static/woff2/iosevk-abbie-extendedsemiboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedsemiboldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 600; | |||
font-stretch: normal; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-semibolditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-semibolditalic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 600; | |||
font-stretch: expanded; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-extendedsemibolditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedsemibolditalic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 700; | |||
font-stretch: normal; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-bold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-bold.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 700; | |||
font-stretch: expanded; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-extendedbold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedbold.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 700; | |||
font-stretch: normal; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-boldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-boldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 700; | |||
font-stretch: normal; | |||
src: url('/static/woff2/iosevk-abbie-boldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-boldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 700; | |||
font-stretch: expanded; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-extendedboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedboldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 700; | |||
font-stretch: expanded; | |||
src: url('/static/woff2/iosevk-abbie-extendedboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedboldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 700; | |||
font-stretch: normal; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-bolditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-bolditalic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 700; | |||
font-stretch: expanded; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-extendedbolditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedbolditalic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 800; | |||
font-stretch: normal; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-extrabold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extrabold.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 800; | |||
font-stretch: expanded; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-extendedextrabold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedextrabold.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 800; | |||
font-stretch: normal; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-extraboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extraboldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 800; | |||
font-stretch: normal; | |||
src: url('/static/woff2/iosevk-abbie-extraboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extraboldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 800; | |||
font-stretch: expanded; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-extendedextraboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedextraboldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 800; | |||
font-stretch: expanded; | |||
src: url('/static/woff2/iosevk-abbie-extendedextraboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedextraboldoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 800; | |||
font-stretch: normal; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-extrabolditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extrabolditalic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 800; | |||
font-stretch: expanded; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-extendedextrabolditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedextrabolditalic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 900; | |||
font-stretch: normal; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-heavy.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-heavy.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 900; | |||
font-stretch: expanded; | |||
font-style: normal; | |||
src: url('/static/woff2/iosevk-abbie-extendedheavy.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedheavy.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 900; | |||
font-stretch: normal; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-heavyoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-heavyoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 900; | |||
font-stretch: normal; | |||
src: url('/static/woff2/iosevk-abbie-heavyoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-heavyoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 900; | |||
font-stretch: expanded; | |||
font-style: oblique; | |||
src: url('/static/woff2/iosevk-abbie-extendedheavyoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedheavyoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka Oblique'; | |||
font-display: swap; | |||
font-weight: 900; | |||
font-stretch: expanded; | |||
src: url('/static/woff2/iosevk-abbie-extendedheavyoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedheavyoblique.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 900; | |||
font-stretch: normal; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-heavyitalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-heavyitalic.ttf') format('truetype'); | |||
} | |||
@font-face { | |||
font-family: 'Iosevka'; | |||
font-display: swap; | |||
font-weight: 900; | |||
font-stretch: expanded; | |||
font-style: italic; | |||
src: url('/static/woff2/iosevk-abbie-extendedheavyitalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedheavyitalic.ttf') format('truetype'); | |||
} |
@ -0,0 +1,62 @@ | |||
$purple-50: #faf5ff; | |||
$purple-100: #f3e8ff; | |||
$purple-200: #e9d5ff; | |||
$purple-300: #d8b4fe; | |||
$purple-400: #c084fc; | |||
$purple-500: #a855f7; | |||
$purple-600: #9333ea; | |||
$purple-700: #7e22ce; | |||
$purple-800: #6b21a8; | |||
$purple-900: #581c87; | |||
$yellow-50: #fefce8; | |||
$yellow-100: #fef9c3; | |||
$yellow-200: #fef08a; | |||
$yellow-300: #fde047; | |||
$yellow-400: #facc15; | |||
$yellow-500: #eab308; | |||
$yellow-600: #ca8a04; | |||
$yellow-700: #a16207; | |||
$yellow-800: #854d0e; | |||
$yellow-900: #713f12; | |||
$bluegray-50: #f8fafc; | |||
$bluegray-100: #f1f5f9; | |||
$bluegray-200: #e2e8f0; | |||
$bluegray-300: #cbd5e1; | |||
$bluegray-400: #94a3b8; | |||
$bluegray-500: #64748b; | |||
$bluegray-600: #475569; | |||
$bluegray-700: #334155; | |||
$bluegray-800: #1e293b; | |||
$bluegray-900: #0f172a; | |||
$red-50: #fef2f2; | |||
$red-100: #fee2e2; | |||
$red-200: #fecaca; | |||
$red-300: #fca5a5; | |||
$red-400: #f87171; | |||
$red-500: #ef4444; | |||
$red-600: #dc2626; | |||
$red-700: #b91c1c; | |||
$red-800: #991b1b; | |||
$red-900: #7f1d1d; | |||
$nav-height: 48px; | |||
$font-size: 14pt; | |||
$code-bg: #282C34; | |||
$code-fg: #ABB2BF; | |||
$code-red: #D65122; | |||
$code-red-br: #AE3B36; | |||
$code-green: #88B966; | |||
$code-yellow: #DEB468; | |||
$code-orange: #C58853; | |||
$code-blue: #519DEB; | |||
$code-pink: #C678DD; | |||
$code-cyan: #48A8B5; | |||
$code-white: #ABB2BF; | |||
$code-grey: #7F848E; | |||
// foo |
@ -2,12 +2,78 @@
title: Home
---
<p>
You've reached Abigail's blog, a place where I exposit and reflect on programming languages and type theory.
Here are the posts I've written recently:
<div id=index>
<div style="grid-column: 2;">
<h2 id=hi>Hi!</h2>
<p style="height: 98px">
<a class=ico-left href="/">
<img alt="profile picture" decoding=async src="/static/icon/[email protected]" />
</a>
I'm Amélia, a non-binary (they/them) mathematician & programmer. This
blog is where I write about programming languages: their implementation,
their semantics, etc.
</p>
<hr />
<div id=social>
<a href="https://twitter.com/plt_amy">
<img class=social decoding=async title="My Twitter profile" src="/static/svg/twitter.svg" style="background-color: #1DA1F2;" />
</a>
<a href="https://github.com/plt-amy">
<img class=social decoding=async title="My GitHub profile" src="/static/svg/github.svg" style="background-color: black;" />
</a>
<a href="https://git.amelia.how">
<img class=social decoding=async title="My personal Gitea" src="/static/svg/gitea.svg" style="background-color: #609926;" />
</a>
<a href="https://ko-fi.com/plt_amy">
<img class=social decoding=async title="Buy me a coffee on Ko-Fi" src="/static/svg/kofi.svg" style="background-color: #434B57;" />
</a>
</div>
<hr />
<p style="height: 98px">
<a class=ico-right href="https://cubical.1lab.dev">
<img class=ico-right alt="cube" decoding=async src="/static/icon/cube-128x.png" />
</a>
In addition to this blog, I maintain <a
href="https://cubical.1lab.dev">the 1Lab</a>, a formalised, cross-linked
reference resource for Homotopy Type Theory, done in Cubical Agda.
</p>
<hr />
<p style="height: 98px">
<a class=ico-left href="https://amulet.works">
<img alt="amulet" decoding=async src="/static/svg/amulet.svg" />
</a>
My most significant project other than this blog and the 1Lab is <a
href="https://amulet.works">Amulet</a>, a functional programming
language in the ML tradition with support for advanced type-level
programming.
</p>
</div>
<div style="grid-column: 4;">
<h2>Posts</h2>
<p>
Here are the latest 5 posts from the blog:
</p>
$partial("templates/post-list.html")$
<p>…or you can find more in the <a href="/archive.html">archives</a>.</p>
</div>
</div>
<style>
main > div#title {
  display: none;
}
</style>
@ -0,0 +1,884 @@
---
title: "Parsing Layout, or: Haskell's Syntax is a Mess"
date: September 3rd, 2021
abbreviations:
  sparkles: ✨
---

Hello! Today we're going to talk about something I'm actually _good_ at, for a change: writing compilers. Specifically, I'm going to demonstrate how to wrangle [Alex] and [Happy] to implement a parser for a simple language with the same indentation-sensitive parsing behaviour as Haskell, the _layout rule_.

[Alex]: https://www.haskell.org/alex/
[Happy]: https://www.haskell.org/happy/

Alex and Happy are incredibly important parts of the Haskell ecosystem. If you're a Haskeller, you use a program built with an Alex lexer and a Happy parser _every single day_ - every single working day, at least - GHC! Despite this fundamental importance, Alex and Happy are... _sparsely_ documented, to say the least. Hopefully this post can serve as an example of how to do something non-trivial using them.

However! While I'm going to talk about Alex and Happy here, it would be entirely possible to write a layout parser using Alex and whatever flavour of Parsec is popular this week, as long as your combinators are expressed on top of a monad transformer. It's also entirely possible to write a layout parser without Alex at all, but that's beyond my abilities. I am a mere mortal, after all.

Get ready to read the word "layout" a lot. Layout layout layout. How's your semantic satiation going? Should I say layout a couple more times?

# The Offside Rule

So, how does Haskell layout work? A small subset of tokens (`where`, `of`, `let`, `do`[^1]), called _layout keywords_, are followed by a _laid-out block_ (my terminology). The happiest (hah) case is where one of these keywords is followed by a `{` token. In this case, layout parsing doesn't happen at all!

[^1]: GHC extends this set to also contain the "token" `\case`. However, `LambdaCase` isn't a single token! The &sparkles; correct &sparkles; specification is that `case` is a layout keyword if the preceding token is `\`.

```{.haskell .notag}
main = do { putStrLn
        "foo"
    ; putStrLn "bar"
  ; putStrLn "quux" }
```

This _abomination_ is perfectly valid Haskell code, since layout is disabled in a context that was started with a `{`. Great success though, since this is a very simple thing to support in a parser. The unhappy case is when we actually have to do layout parsing. In that case, the starting column of the token immediately following the layout token becomes the _reference column_ (again my terminology), we emit a (virtual) opening brace, and the **offside rule** applies.

The offside rule says that a player must have at least two opposing players, counting the goalkeep- No no, that's not right. Give me a second. Ah! Yes. The offside rule governs automatic insertion of (virtual) semicolons and closing braces. When we encounter the first token of a new line, we are burdened to compare its starting column with the reference:

- If it's on the same column as the reference column, we emit a semicolon. This is a new statement/declaration/case.

<div class=mathpar>

```haskell
do foo
   bar
-- ^ same column, insert ; before.
```

```haskell
do
   foo
   bar
-- ^ same column, insert ; before.
-- yes, three spaces
```

</div>

The two token streams above have the same prefix as `do { foo; bar }`{.haskell}.

- If it's further indented than the reference column, we... do nothing! Just go back to normal lexing. Tokens indented to the right of the reference column are interpreted as continuing the statement in the previous line. That's why you can do this:

```haskell
do
  putStrLn $
    wavy
      function
    application
      please
       don't
         though
```

_All_ of those tokens are (in addition to being the first token in a line) indented further than `putStrLn`, which is our reference column. This block has no semicolons at all!

- If it's less indented than the reference column, we emit a virtual closing `}` (to end the block) and _**apply the rule again**_. This last bit is crucial: it says a single token can end all of the layout contexts it's leaving. For instance:

```haskell
foo = do a -- context 1
         do b -- context 2
            do c -- context 3
               do d -- context 4
                  e
bar = 123
```

Assuming there was a layout context at the first column, i.e., we're in a module, then the token `bar` will be responsible for closing 4 whole layout contexts:

- It's to the left of `d`, so it closes context 4;
- It's to the left of `c`, so it closes context 3;
- It's to the left of `b`, so it closes context 2;
- It's to the left of `a`, so it closes context 1.

With all the semicolons we have a right to, the code above is this:

``` haskell
; foo = do { a -- context 1
           ; do { b -- context 2
                ; do { c -- context 3
                     ; do { d -- context 4
                          ; e
                          }
                     }
                }
           }
; bar = 123
```

Why do we have semicolons before `foo` and `bar`? Why, because they're in the same column as the reference token, which was presumably an import or something.

# Laid-out blocks

With that, the parser productions for laid-out blocks should be clear - or, at least, easily approximable. Right?

Wrong.

You might think the production for `do` blocks is something like the following, and you'd be forgiven for doing so. It's clean, it's reasonable, it's not _actually_ Happy syntax, but it's a close enough approximation. Except that it's way incorrect!

```
expr
  : ...
  | 'do' '{' statement ';' ... '}'        { ... }
  | 'do' VOpen statement VSemi ... VClose { ... }
```

Well, for `do` you might be able to get away with that. But consider the laid-out code on the left, and what the lexer naïvely produces for us on the right.

<div class=mathpar>

```haskell
foo = let x = 1 in x
```

```haskell
; foo = let { x = 1 in x
```

</div>

You see it, right? Since no token was on a column before that of the token `x` (the reference token for the layout context started by `let`), no close brace was emitted before `in`. Woe is us! However, the Haskell report has a way around this. They write it cryptically, like this:

> ```
> ...
> L (t : ts) (m : ms) = } : (L (t : ts) ms)   if m ≠ 0 and parse-error(t)
> ...
> ```
>
> The side condition `parse-error(t)` is to be interpreted as follows: if the tokens generated so far by `L` together with the next token `t` represent an invalid prefix of the Haskell grammar, and the tokens generated so far by `L` followed by the token `}` represent a valid prefix of the Haskell grammar, then `parse-error(t)` is true.
>
> The test `m ≠ 0` checks that an implicitly-added closing brace would match an implicit open brace.

I'll translate, since I'm fluent in standardese: Parse errors are allowed to terminate layout blocks, as long as no explicit `{` was given. This is the entire reason that Happy has an `error` token, which "matches parse errors"! For further reference, `L` is a function `[Token] -> [Int] -> [Token]`{.haskell} which is responsible for inserting virtual `{`, `;` and `}` tokens. The `[Int]`{.haskell} argument is the stack of reference columns.
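
Before we wire parse errors in, it might help to see the vanilla offside comparisons as code. Here's a minimal sketch (my names, not the report's `L` nor this post's final lexer): each token is annotated with its starting column and whether it begins a line, `ms` is the stack of reference columns, and both the opening of contexts and the `parse-error(t)` clause are left out.

```haskell
-- Sketch only: the three offside comparisons, no context opening, no
-- parse-error(t). TkVSemi and TkVClose are the virtual tokens we
-- declare later in this post.
offside :: [(Token, Int, Bool)] -> [Int] -> [Token]
offside toks@((t, col, startsLine) : ts) (m : ms)
  | startsLine, col == m = TkVSemi : t : offside ts (m : ms) -- new statement
  | startsLine, col < m  = TkVClose : offside toks ms        -- close, retry
offside ((t, _, _) : ts) ms = t : offside ts ms              -- continuation
offside []              ms = map (const TkVClose) ms         -- EOF closes all
```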
So a better approximation of the grammar is:

```
expr
  : ...
  | 'do' '{' statement ';' ... '}'        { ... }
  | 'do' VOpen statement VSemi ... LClose { ... }

LClose
  : VClose {- lexer inserted '}' -}
  | error  {- parse error generated '}' -}
```

We have unfortunately introduced some dragons, since the parser now needs to finesse the lexer state, meaning the two must be interleaved _explicitly_, instead of being run in sequence (using a lazy list of tokens or similar). They must run in the same monad.
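
Concretely, "the same monad" means something like the following sketch (the names here are invented for illustration; the post builds its real `Lexer` monad in `Lexer.Support` below): one state record threaded through both lexing and parsing, so that the `error` alternative can pop a layout context.

```haskell
import Control.Monad.State

-- Hypothetical state shared by lexer and parser; field names are mine.
data PState = PState
  { pInput  :: String -- remaining source text
  , pLayout :: [Int]  -- stack of reference columns, innermost first
  }

type P = State PState

-- What the grammar's `error` alternative boils down to: close the
-- innermost implicit layout context, as if a virtual '}' appeared.
popLayout :: P (Maybe Token)
popLayout = do
  st <- get
  case pLayout st of
    _ : ms -> put st { pLayout = ms } >> pure (Just TkVClose)
    []     -> pure Nothing
```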
So. How do we implement this?

# How we implement this

## Preliminaries

To start with, we create a new Haskell project. I'd normally gloss over this, but in this case, there are adjustments to the Cabal file that must be made to inform our build of the dependencies on `alex` and `happy`. I use Stack; you can use whatever.

```bash
% stack new layout simple
```

To our Cabal file, we add a `build-tool-depends` on Alex and Happy. Cabal (the build system) comes with built-in rules to detect `.x` and `.y` files and compile these as Ale**x** and Happ**y** respectively.

```{.haskell tag="layout.cabal"}
  build-tool-depends: alex:alex >= 3.2.4 && < 4.0
                    , happy:happy >= 1.19.12 && < 2.0
  build-depends: base >= 4.7 && < 5
               , array >= 0.5 && < 0.6
```

This has been the recommended way of depending on build tools since Cabal 2. The syntax of `build-tool-depends` entries is `package:executable [version bound]`, where the version bound is optional but good style. With this, running `stack build` (and/or `cabal build`) will automatically compile parser and lexer specifications **listed in your `other-modules` field** to Haskell files.

Alex-generated code depends on the `array` package, hence the extra `build-depends` entry.
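
For reference, that `other-modules` field might end up looking something like this once all the modules from this post exist (I'm assuming the Happy module will be called `Parser`; name yours however you like):

```{.haskell tag="layout.cabal"}
  other-modules: Lexer
               , Lexer.Support
               , Parser
               , Syntax
```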
## What are we parsing | |||
For the language we're parsing, I've chosen to go with a representative subset of Haskell's grammar: Variables, lambda expressions, `let` expressions, and application. For the top-level, we'll support function definitions, where the lhs must be a sequence of variables, and the rhs can optionally have a `where` clause. | |||
```{ .haskell tag="src/Syntax.hs" } | |||
module Syntax (Expr(..), Decl(..), Program) where | |||
data Expr | |||
= Var String | |||
| App Expr Expr | |||
| Lam String Expr | |||
| Let [Decl] Expr | |||
deriving (Eq, Show) | |||
data Decl | |||
= Decl { declName :: String | |||
, declRhs :: Expr | |||
, declWhere :: Maybe [Decl] | |||
} | |||
deriving (Eq, Show) | |||
type Program = [Decl] | |||
``` | |||
For simplicity, identifiers will be ASCII only. We're also using strings and lists everywhere, instead of more appropriate data structures (`Text` and `Seq`), for clarity. Don't forget to add the `Syntax` module to the `other-modules` field in `layout.cabal`. | |||
## The Lexer | |||
Before we can parse, we must lex. But before we can lex, we must know the type of tokens. We create a separate Haskell module to contain the definition of the token type and `Lexer` monad. This is mostly done because HIE does not support Alex and Happy, and I've become dependent on HIE for writing correct code fast. | |||
We'll call this new module `Lexer.Support`, just because. Our type of tokens must contain our keywords, but also punctuation (`=`, `{`, `;`, `}`, `\\`, `->`) and _virtual_ punctuation (tokens inserted by layout). We declare: | |||
```{.haskell tag="src/Lexer/Support.hs"} | |||
module Lexer.Support where | |||
data Token | |||
= TkIdent String -- identifiers | |||
-- Keywords | |||
| TkLet | TkIn | TkWhere | |||
-- Punctuation | |||
| TkEqual | TkOpen | TkSemi | TkClose | |||
| TkLParen | TkRParen | |||
| TkBackslash | TkArrow | |||
-- Layout punctuation | |||
| TkVOpen | TkVSemi | TkVClose | |||
-- End of file | |||
| TkEOF | |||
deriving (Eq, Show) | |||
``` | |||
### An Alex file | |||
Alex modules always start with a Haskell header, between braces. In general, braces in Alex code represent a bit of Haskell we're inserting: The header, lexer actions, and the footer. | |||
```{.alex tag="src/Lexer.x"} | |||
{ | |||
module Lexer where | |||
import Lexer.Support | |||
} | |||
%encoding "latin1" | |||
``` | |||
After the header, we can also include magical incantations: `%wrapper` will tell Alex to include a support code template with our lexer, and `%encoding` will tell it whether to work with bytes or with Unicode. _Nobody uses the Unicode support_, not even GHC: The community wisdom is to trick Alex into reading Unicode by compressing Unicode classes down into high byte characters. Yeah, **yikes**. | |||
Our file can then have some macro definitions. Macros with the `$` sigil are character classes, and `@` macros are complete regular expressions. | |||
```{.alex tag="src/Lexer.x"} | |||
$lower = [ a-z ] | |||
$upper = [ A-Z ] | |||
@ident = $lower [ $lower $upper _ ' ]* | |||
``` | |||
And, finally, comes the actual lexer specification. We include the final magic word `:-` on a line by itself, and then list a bunch of lexing rules. Lexing rules are specified by: | |||
- A _startcode_, which names a _state_. These are written `<ident>` or `<0>`, where `<0>` is taken to be the "default" startcode. Rules are by default enabled in all states, and can be enabled in many; | |||
- A _left context_, which is a regular expression matched against the character immediately preceding the token; | |||
- A _regular expression_, describing the actual token; | |||
- A _right context_, which can be a regular expression to be matched after the token or a fragment of Haskell code, called a _predicate_. If the predicate is present, it must have the following type: | |||
```{.haskell .notag} | |||
{ ... } :: user -- predicate state | |||
-> AlexInput -- input stream before the token | |||
-> Int -- length of the token | |||
-> AlexInput -- input stream after the token | |||
-> Bool -- True <=> accept the token | |||
``` | |||
- An _action_, which can be `;`, causing the lexer to skip the token, or some Haskell code, which can be any expression, as long as every action has the same type. | |||
Here are a couple of rules so we can get started. Don't worry - `emit` is a secret tool that will help us later.
```{.alex tag="src/Lexer.x"} | |||
:- | |||
[\ \t]+ ; | |||
<0> @ident { emit TkIdent } | |||
``` | |||
Alright, let's compile this code and see what we get! Oh, we get some type errors. Okay. Let's see what's up: | |||
``` | |||
Not in scope: type constructor or class ‘AlexInput’ | |||
| | |||
264 | | AlexLastSkip !AlexInput !Int | |||
| ^^^^^^^^^ | |||
``` | |||
### Making our own wrapper | |||
Right. That's probably related to that `%wrapper` thing I told you about. You'd be correct: The wrappers solve this problem by including a handful of common patterns pre-made, but we can very well supply our own! The interface to an Alex-generated lexer is documented [here](https://www.haskell.org/alex/doc/html/api.html), but we're interested in §5.1 specifically. We have to provide the following definitions: | |||
```{.haskell .notag} | |||
type AlexInput | |||
alexGetByte :: AlexInput -> Maybe (Word8, AlexInput) | |||
alexInputPrevChar :: AlexInput -> Char | |||
``` | |||
And we get in return a lexing function, whose type and interface I'm not going to copy-paste here. The `alexGetByte` function is called by the lexer whenever it wants input, so that's the natural place to do position handling, which, yes, we have to do ourselves. Let's fill in these definitions in the `Lexer.Support` module. | |||
Here's an okay choice for `AlexInput`: | |||
```{.haskell tag="src/Lexer/Support.hs"} | |||
data AlexInput | |||
= Input { inpLine :: {-# UNPACK #-} !Int | |||
, inpColumn :: {-# UNPACK #-} !Int | |||
, inpLast :: {-# UNPACK #-} !Char | |||
, inpStream :: String | |||
} | |||
deriving (Eq, Show) | |||
``` | |||
We can immediately take `alexInputPrevChar = inpLast` as the definition of that function and be done with it, which is fantastic. `alexGetByte`, on the other hand, is a bit more involved, since it needs to update the position based on what character was read. The column _must_ be set properly, otherwise layout won't work! The line counter is less important, though. | |||
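Spelled out, with its type signature, that definition is:
```haskell
alexInputPrevChar :: AlexInput -> Char
alexInputPrevChar = inpLast
```
As for `alexGetByte`: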
```haskell | |||
alexGetByte :: AlexInput -> Maybe (Word8, AlexInput) | |||
alexGetByte inp@Input{inpStream = str} = advance <$> uncons str where | |||
advance ('\n', rest) = | |||
( fromIntegral (ord '\n') | |||
, Input { inpLine = inpLine inp + 1 | |||
, inpColumn = 1 | |||
, inpLast = '\n' | |||
, inpStream = rest } | |||
) | |||
advance (c, rest) = | |||
( fromIntegral (ord c) | |||
, Input { inpLine = inpLine inp | |||
, inpColumn = inpColumn inp + 1 | |||
, inpLast = c | |||
, inpStream = rest } | |||
) | |||
``` | |||
Now, our lexer has a lot of state. We have the start codes, which form a stack. We have the stack of reference columns, and we have the input. Let's use a State monad to keep track of this, with an `Either String` base to keep track of errors. | |||
```{.haskell tag="src/Lexer/Support.hs"} | |||
newtype Lexer a = Lexer { _getLexer :: StateT LexerState (Either String) a } | |||
deriving | |||
( Functor | |||
, Applicative | |||
, Monad | |||
, MonadState LexerState | |||
, MonadError String | |||
) | |||
data Layout = ExplicitLayout | LayoutColumn Int | |||
deriving (Eq, Show, Ord) | |||
data LexerState | |||
= LS { lexerInput :: {-# UNPACK #-} !AlexInput | |||
, lexerStartCodes :: {-# UNPACK #-} !(NonEmpty Int) | |||
, lexerLayout :: [Layout] | |||
} | |||
deriving (Eq, Show) | |||
initState :: String -> LexerState | |||
initState str = LS { lexerInput = Input 0 1 '\n' str | |||
, lexerStartCodes = 0 :| [] | |||
, lexerLayout = [] | |||
} | |||
runLexer :: Lexer a -> String -> Either String a | |||
runLexer act s = fst <$> runStateT (_getLexer act) (initState s) | |||
``` | |||
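By the way, here's a sketch of the preamble `Lexer.Support` needs for all of the code above and below to compile - mtl provides the monad transformer bits, and the newtype deriving needs a pragma:
```{.haskell tag="src/Lexer/Support.hs, top of the module"}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}

import Control.Monad.State  -- StateT, MonadState, gets, modify'
import Control.Monad.Except -- MonadError, throwError
import Data.List.NonEmpty (NonEmpty(..))
import qualified Data.List.NonEmpty as NE
import Data.List (uncons)
import Data.Char (ord)
import Data.Word (Word8)
```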
<details> | |||
<summary> I'll spare you the boring stack manipulation stuff by putting it in one of these \<details\> elements you can expand: </summary> | |||
```haskell | |||
startCode :: Lexer Int | |||
startCode = gets (NE.head . lexerStartCodes) | |||
pushStartCode :: Int -> Lexer () | |||
pushStartCode i = modify' $ \st -> | |||
st { lexerStartCodes = NE.cons i (lexerStartCodes st) }
-- If there is no start code to go back to, we go back to the 0 start code. | |||
popStartCode :: Lexer () | |||
popStartCode = modify' $ \st -> | |||
st { lexerStartCodes = | |||
case lexerStartCodes st of | |||
_ :| [] -> 0 :| [] | |||
_ :| (x:xs) -> x :| xs | |||
} | |||
layout :: Lexer (Maybe Layout) | |||
layout = gets (fmap fst . uncons . lexerLayout) | |||
pushLayout :: Layout -> Lexer () | |||
pushLayout i = modify' $ \st -> | |||
st { lexerLayout = i:lexerLayout st } | |||
popLayout :: Lexer () | |||
popLayout = modify' $ \st -> | |||
st { lexerLayout = | |||
case lexerLayout st of | |||
_:xs -> xs | |||
[] -> [] | |||
} | |||
``` | |||
</details> | |||
### Putting it all together | |||
It's up to us to specify what an action is - remember, the action is the code block following a lexer rule - so we'll go with `String -> Lexer Token`. The `String` argument is the lexed token, and we'll have to take this slice ourselves when we implement the interface between the Alex lexer and our `Lexer` monad. The `emit` action is simple, and we'll throw in `token` for no extra cost: | |||
```haskell | |||
emit :: (String -> Token) -> String -> Lexer Token | |||
emit = (pure .) | |||
token :: Token -> String -> Lexer Token | |||
token = const . pure | |||
``` | |||
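If the point-free style reads as line noise, these expand to:
```haskell
-- equivalent, pointful definitions
emit :: (String -> Token) -> String -> Lexer Token
emit f str = pure (f str)

token :: Token -> String -> Lexer Token
token t _ = pure t
```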
Back to our `Lexer.x`, we have to write the function to interpret Alex lexer results as `Lexer` monad actions. It goes like this: | |||
```{.haskell tag="src/Lexer.x, add at the bottom" } | |||
{ | |||
handleEOF = do | |||
-- TODO: handle layout | |||
pure TkEOF | |||
scan :: Lexer Token | |||
scan = do | |||
input@(Input _ _ _ string) <- gets lexerInput | |||
startcode <- startCode | |||
case alexScan input startcode of | |||
AlexEOF -> handleEOF | |||
AlexError (Input _ _ _ inp) -> | |||
throwError $ "Lexical error: " ++ show (head inp) | |||
AlexSkip input' _ -> do | |||
modify' $ \s -> s { lexerInput = input' } | |||
scan | |||
AlexToken input' tokl action -> do | |||
modify' $ \s -> s { lexerInput = input' } | |||
action (take tokl string) | |||
} | |||
``` | |||
Now we can do a `stack build` to compile the lexer and `stack repl` to play around with it! | |||
```{.haskell tag="Did you know my Myers-Briggs type is GHCI?"} | |||
λ runLexer scan "abc" | |||
Right (TkIdent "abc") | |||
λ runLexer scan " abc" | |||
Right (TkIdent "abc") | |||
λ runLexer scan " {" | |||
Left "Lexical error: '{'" | |||
``` | |||
Okay, yeah, let's fill out our lexer a bit more. | |||
```{.alex tag="src/Lexer.x, lexing rules"} | |||
<0> in { token TkIn } | |||
<0> \\ { token TkBackslash } | |||
<0> "->" { token TkArrow } | |||
<0> \= { token TkEqual } | |||
<0> \( { token TkLParen } | |||
<0> \) { token TkRParen } | |||
<0> \{ { token TkOpen } | |||
<0> \} { token TkClose } | |||
``` | |||
That's all of the easy rules we can do - all of the others interact with the layout state, which we'll see how to handle in the paragraph immediately following this one. I'm writing a bit of padding here so you can take a breather and prepare yourself for the lexer states we'll deal with now. But, please believe me when I say we're doing this lexer madness so our parser can be sane.
### Actually Doing Layout (trademark pending) | |||
We'll need two rules for the layout keywords. Alex rules are matched in order, top-to-bottom, so **make sure your keywords are before your identifier rule**. | |||
```{.alex tag="src/Lexer.x"} | |||
<0> let { layoutKw TkLet } | |||
<0> where { layoutKw TkWhere } | |||
``` | |||
And the action for layout keywords, which has to go in the lexer since it'll refer to a startcode. Alex automatically generates definitions for all the startcodes we mention. | |||
```haskell | |||
layoutKw :: Token -> String -> Lexer Token
layoutKw t _ = do
pushStartCode layout | |||
pure t | |||
``` | |||
The interesting rules for handling layout are in the `layout` startcode, which we'll declare as a block to keep things a bit tidier. When in this startcode, we need to handle either an explicitly laid-out block (that is, `{`), or the start of a layout context: The indentation of the next token determines where we start. | |||
```{.alex tag="src/Lexer.x"} | |||
<layout> { | |||
-- Skip comments and whitespace | |||
"--" .* \n ; | |||
\n ; | |||
\{ { openBrace } | |||
() { startLayout } | |||
} | |||
``` | |||
The `openBrace` and `startLayout` lexer actions are also simple: | |||
```haskell | |||
openBrace _ = do | |||
popStartCode | |||
pushLayout ExplicitLayout | |||
pure TkOpen | |||
startLayout _ = do | |||
popStartCode | |||
reference <- Lexer.Support.layout | |||
col <- gets (inpColumn . lexerInput) | |||
if Just (LayoutColumn col) <= reference | |||
then pushStartCode empty_layout | |||
else pushLayout (LayoutColumn col) | |||
pure TkVOpen | |||
``` | |||
Here's another rule. Suppose we have:
```haskell | |||
foo = bar where | |||
spam = ham | |||
``` | |||
If we just apply the rule that the next token after a layout keyword determines the column for the layout context, then we're starting another layout context at column 1! That's definitely not what we want.
The fix: A new layout context only starts if the first token is to the right of the previous layout context. That is: a block only starts if its first token is indented strictly further than the enclosing layout context.
But! We still need to emit a closing `}` for the virtual `{` that `startLayout` emitted! This is the sole function of the `empty_layout` startcode:
<div class=mathpar> | |||
``` | |||
<empty_layout> () { emptyLayout } | |||
``` | |||
```haskell | |||
emptyLayout _ = do | |||
popStartCode | |||
pushStartCode newline | |||
pure TkVClose | |||
``` | |||
</div> | |||
We're on the home stretch. I mentioned another startcode - `newline`. It's where we do the offside rule, and our lexer will finally be complete. | |||
### The Offside Rule, again | |||
The `newline` state is entered in two places: After an empty layout block (as a short-circuit), and after, well, a new line character. Comments also count as newline characters, by the way. | |||
```{.alex tag="src/Lexer.x, rule"} | |||
<0> "--" .* \n { \_ -> pushStartCode newline *> scan } | |||
<0> \n { \_ -> pushStartCode newline *> scan } | |||
``` | |||
In the `newline` state, we again scan for a token, and call for an action, just like for `layout`. The difference is only in the action: Whenever _any_ token is encountered, we perform the offside rule, _if_ we're in a layout context that mandates it. | |||
```{.alex tag="src/Lexer.x, rule"} | |||
<newline> { | |||
\n ; | |||
"--" .* \n ; | |||
() { offsideRule } | |||
} | |||
``` | |||
The code for the offside rule is a bit hairy, but follows from the spec: | |||
```{.haskell tag="src/Lexer.x, epilogue code"} | |||
offsideRule _ = do | |||
context <- Lexer.Support.layout | |||
col <- gets (inpColumn . lexerInput) | |||
let continue = popStartCode *> scan | |||
case context of | |||
Just (LayoutColumn col') -> do | |||
case col `compare` col' of | |||
EQ -> do | |||
popStartCode | |||
pure TkVSemi | |||
GT -> continue | |||
LT -> do | |||
popLayout | |||
pure TkVClose | |||
_ -> continue | |||
``` | |||
Check out how cleanly those three cases map to the rules I described [way back when](#h0). We `compare`{.haskell} the current column with the reference, and: | |||
- If it's `EQ`, add a semicolon. | |||
- If it's `GT`, continue lexing. | |||
- If it's `LT`, close as many layout contexts as possible. | |||
<details> | |||
<summary> | |||
**Exercise**: In the `handleEOF` action, close all the pending layout contexts. As a hint, the easiest way to emit a token without consuming any input is using a startcode and a lexer action. Figuring out when we've run out of layout contexts to close is part of the challenge :)
</summary> | |||
The rule: | |||
```{.alex tag="src/Lexer.x, rule"} | |||
<eof> () { doEOF } | |||
``` | |||
The action: | |||
```{.haskell tag="src/Lexer.x, epilogue code"} | |||
handleEOF = pushStartCode eof *> scan | |||
doEOF _ = do | |||
t <- Lexer.Support.layout | |||
case t of | |||
Nothing -> do | |||
popStartCode | |||
pure TkEOF | |||
_ -> do | |||
popLayout | |||
pure TkVClose | |||
``` | |||
</details> | |||
We can write a `Lexer` action (not a lexer action!) to lex and `Debug.Trace.trace`{.haskell} - sue me - as many tokens as the lexer wants to give us, until an EOF is reached: | |||
```haskell | |||
lexAll :: Lexer () | |||
lexAll = do | |||
tok <- scan | |||
case tok of | |||
TkEOF -> pure () | |||
x -> do | |||
traceM (show x) | |||
lexAll | |||
``` | |||
Now we can actually lex some Haskell code! Well, not much of it. Forget numbers, strings, and most keywords, but we _can_ lex this: | |||
<div class="mathpar"> | |||
```haskell | |||
foo = let | |||
x = let | |||
y = z | |||
in y | |||
in x | |||
``` | |||
```haskell | |||
TkIdent "foo" | |||
TkEqual | |||
TkLet | |||
TkVOpen | |||
TkIdent "x" | |||
TkEqual | |||
TkLet | |||
TkVOpen | |||
TkIdent "y" | |||
TkEqual | |||
TkIdent "z" | |||
TkVSemi | |||
TkIn | |||
TkIdent "y" | |||
TkVClose | |||
TkVClose | |||
TkIn | |||
TkIdent "x" | |||
``` | |||
</div> | |||
That is, that code is lexed as if it had been written: | |||
```{.haskell tag="Hmm..."} | |||
foo = let { | |||
x = let { | |||
y = z | |||
; in y | |||
}} in x | |||
``` | |||
That's... Yeah. Hmm. That's _not right_. What are we forgetting? Ah, who am I kidding, you've guessed this bit. I even said it myself! | |||
> Parse errors are allowed to terminate layout blocks. | |||
We don't have a parser to get errors from, so our layout blocks are terminating too late. Let's write a parser! | |||
## The Parser | |||
Happy is, fortunately, less picky about how to generate code. Instead of appealing to some magic symbols that it just hopes really hard are in scope, Happy asks us how we want it to interface with the lexer. We'll do it &sparkles; Monadically &sparkles;, of course. | |||
Happy files start the same way as Alex files: A Haskell code block, between braces, and some magic words. You can look up what the magic words do in the documentation, or you can guess - I'm just gonna include all the header here: | |||
```{.happy tag="src/Parser.y"} | |||
{ | |||
module Parser where | |||
import Control.Monad.Except
import Lexer.Support | |||
} | |||
%name parseExpr Expr | |||
%tokentype { Token } | |||
%monad { Lexer } | |||
%lexer { lexer } { TkEOF } | |||
%errorhandlertype explist | |||
%error { parseError } | |||
``` | |||
After these magic incantations (by the way, if you can't find the docs for errorhandlertype, that's because the docs you're looking at are out of date. See [here](https://monlih.github.io/happy-docs/)), we list our tokens in the `%token` directive. In the braces we write Haskell - not an expression, but a pattern. | |||
```{.happy tag="src/Parser.y, after the directives"} | |||
%token | |||
VAR { TkIdent $$ } | |||
'let' { TkLet } | |||
'in' { TkIn } | |||
'where' { TkWhere } | |||
'=' { TkEqual } | |||
'{' { TkOpen } | |||
';' { TkSemi } | |||
'}' { TkClose } | |||
'\\' { TkBackslash } | |||
'->' { TkArrow } | |||
'(' { TkLParen } | |||
')' { TkRParen } | |||
OPEN { TkVOpen } | |||
SEMI { TkVSemi } | |||
CLOSE { TkVClose } | |||
%% | |||
``` | |||
The special `$$` pattern says that if we use a `VAR` token in a production, its value should be the string contained in the token, rather than the token itself. We write productions after the `%%`, and they have this general syntax: | |||
```happy | |||
Production :: { Type } | |||
: rule1 { code1 } | |||
| rule2 { code2 } | |||
| ... | |||
``` | |||
For starters, we have these productions. You can see that in the code associated with a rule, we can refer to the tokens parsed using `$1, $2, $3, ...`. | |||
```{.happy tag="src/Parser.y, after the %%"} | |||
Atom :: { Expr } | |||
: VAR { Var $1 } | |||
| '(' Expr ')' { $2 } | |||
Expr :: { Expr } | |||
: '\\' VAR '->' Expr { Lam $2 $4 } | |||
| FuncExpr { $1 } | |||
FuncExpr :: { Expr } | |||
: FuncExpr Atom { App $1 $2 } | |||
| Atom { $1 } | |||
``` | |||
In the epilogue, we need to define two functions, since I mentioned them way up there in the directives. The `lexer` function is a continuation-passing style function that needs to call `cont` with the next token from the lexer. The `parseError` function is how we should deal with parser errors. | |||
```{.happy tag="src/Parser.y, on the very bottom"} | |||
{ | |||
lexer cont = scan >>= cont | |||
parseError = throwError . show | |||
} | |||
``` | |||
By using the `%name` directive we can export a parser production as an action in the `Lexer` monad (since that's what we told Happy to use). Combining that with our `runLexer`, we can parse some expressions, yay! | |||
```haskell | |||
λ runLexer parseExpr "(\\x -> x) (\\y -> y)" | |||
Right (App (Lam "x" (Var "x")) (Lam "y" (Var "y"))) | |||
``` | |||
### Laid-out productions | |||
Now we'll introduce some productions for parsing laid-out lists of declarations, then we'll circle back and finish with the parser for declarations itself. | |||
```{.happy tag="src/Parser.y, add wherever"} | |||
DeclBlock :: { [Decl] } | |||
: '{' DeclListSemi '}' { $2 } | |||
| OPEN DeclListSEMI Close { $2 } | |||
DeclListSemi :: { [Decl] } | |||
: Decl ';' DeclListSemi { $1:$3 } | |||
| Decl { [$1] } | |||
| {- empty -} { [] } | |||
DeclListSEMI :: { [Decl] } | |||
: Decl SEMI DeclListSEMI { $1:$3 }
| Decl { [$1] } | |||
| {- empty -} { [] } | |||
``` | |||
That is, a block of declarations is either surrounded by `{ ... }` or by `OPEN ... Close`. But what's `Close`? That's right, you've guessed this bit too: | |||
```{.happy tag="src/Parser.y, add wherever"} | |||
Close | |||
: CLOSE { () } | |||
| error {% popLayout } | |||
``` | |||
Say it louder for the folks in the cheap seats - Parse! Errors! Can! End! Layout! Blocks! Isn't that just magical? | |||
Now we can write a production for `let` (in `Expr`): | |||
```{.happy tag="src/Parser.y, add to Expr"} | |||
| 'let' DeclBlock 'in' Expr { Let $2 $4 } | |||
``` | |||
And one for declarations: | |||
```{.happy tag="src/Parser.y, add wherever"} | |||
Decl | |||
: VAR '=' Expr { Decl $1 $3 Nothing } | |||
| VAR '=' Expr 'where' DeclBlock { Decl $1 $3 (Just $5) } | |||
``` | |||
Add a name directive for `Decl` and...
```{.happy tag="src/Parser.y, add to the directives"} | |||
%name parseDecl Decl | |||
``` | |||
We're done! | |||
# No, seriously, that's it. | |||
Yeah, 3000 words is all it takes to implement a parser for Haskell layout. Running this on the example where the lexer dropped the ball from earlier, we can see that the parser has correctly inserted all the missing `}`s in the right place because of the `Close` production, and the AST we get is what we expect: | |||
```haskell | |||
λ runLexer parseDecl <$> readFile "that-code-from-before.hs" | |||
Right | |||
(Decl { declName = "foo" | |||
, declRhs = | |||
Let [ Decl { declName = "x" | |||
, declRhs = | |||
Let | |||
[ Decl { declName = "y", declRhs = Var "z" | |||
, declWhere = Nothing} ] | |||
(Var "y") | |||
, declWhere = Nothing | |||
} | |||
] | |||
(Var "x") | |||
, declWhere = Nothing | |||
}) | |||
``` | |||
I've thrown the code from this post up in an organised manner on [my Gitea](https://git.amelia.how/amelia/layout-parser/). The lexer worked out to be 130 lines, and the parser - just 81. | |||
Here's why I favour this approach: | |||
- It's maintainable. Apart from the rendezvous in `Close`, the lexer and the parser are completely independent. They're also entirely declarative - Reading the lexer rules tells you exactly what the lexer does, without having to drop down to how the actions are implemented. | |||
- It cleanly extends to supporting ASTs with annotations - you'd change our current `Token`{.haskell} type to a `TokenClass`{.haskell} type, and a `Token`{.haskell} would then bundle a `TokenClass` with the line and column from the lexer state (there's a sketch right after this list). Annotating the AST with these positions can be done by projecting from `$N` in the Happy rules.
- It's performant. Obviously the implementation here, using `String`, is not, but by changing how the `AlexInput` type behaves internally, we can optimise by using e.g. a lazy ByteString, a lazy Text, or some other kind of crazy performant stream type. I don't think anyone's ever complained about parsing being their bottleneck with GHC. | |||
- It's popular! The code implemented here is a simplification (wild simplification) of the approach used in GHC and Agda. | |||
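Here's a sketch of that `TokenClass` refactor, with made-up names, just to make the shape concrete:
```haskell
-- What we currently call Token becomes TokenClass...
data TokenClass
  = TcIdent String
  | TcLet | TcIn | TcWhere
  -- ...and so on, one Tc constructor per Tk constructor
  deriving (Eq, Show)

-- ...and a Token pairs a TokenClass with the position at which
-- the lexer emitted it.
data Token
  = Token { tokLine  :: !Int
          , tokCol   :: !Int
          , tokClass :: TokenClass
          }
  deriving (Eq, Show)
```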
Thank you for reading this post. I have no idea what I'm going to write about next! |
@ -0,0 +1,420 @@ | |||
--- | |||
title: Typing (GHC) Haskell in Haskell | |||
subtitle: The OutsideIn(X) Elaboration Algorithm | |||
date: September 5th, 2021 | |||
public: false | |||
--- | |||
Typing Haskell in Haskell, in addition to being a solved problem, is the name of [a paper] by Mark P. Jones that constructs, in detail, a solution to that problem. The goal of that paper is noble: a complete specification of Haskell's type system as an executable Haskell program. And, indeed, in 2000, when that paper was published, it _was_ a complete specification of Haskell's type system, depending on what you mean by Haskell. However, most people do not mean "Haskell 2010" when they say Haskell, let alone Haskell 98 - what the paper implements. Further, it's been 21 years! | |||
[a paper]: https://web.cecs.pdx.edu/~mpj/thih/thih.pdf | |||
When I say Haskell, personally, I mean "GHC's default language", and possibly throw in some 20 extensions on top anyway. Here's a small list of conveniences that 2021 Haskell programmers are used to, but that were only implemented in the two decades since _Typing Haskell in Haskell_ was first published - or, in the case of FunDeps, were simply never standardised:
- Rank-N types, a limited implementation of first-class polymorphism, let a Haskell programmer write `forall`s to the left of as many arrows as she wants. For a motivating example, take the ST monad, from which a value can be extracted using `runST`: | |||
```haskell | |||
runST :: (forall s. ST s a) -> a | |||
``` | |||
Since the type of the state token - `s` - is universally quantified, it's not "chosen" by the ST computation, but rather by `runST` itself, making sure that the computation can't adversarially "choose" an instantiation of `s` that violates referential transparency. | |||
Rank-N types were first implemented in GHC in November 2001, in [this commit](https://gitlab.haskell.org/ghc/ghc/-/commit/5e3f005d3012472e422d4ffd7dca5c21a80fca80). | |||
- Generalised algebraic data types (GADTs), which let us introduce local _equational constraints_ between types by means of pattern matching. I'm a big fan of GADTs, so much so that I paid 20 bucks to register the domain [gadt.fans](https://gadt.fans). The classic example of GADTs is a well-typed interpreter, where the type of each constructor constrains the return type of the interpreter: | |||
```haskell | |||
data Exp a where | |||
Add :: Exp Int -> Exp Int -> Exp Int | |||
IsZ :: Exp Int -> Exp Bool | |||
If :: Exp Bool -> Exp a -> Exp a -> Exp a | |||
Lit :: Int -> Exp Int | |||
eval :: Exp a -> a | |||
eval (Lit i) = i | |||
{- most cases omitted for brevity -} | |||
``` | |||
GADTs were first implemented in GHC in September 2004, in [this commit](https://gitlab.haskell.org/ghc/ghc/-/commit/23f40f0e9be6d4aa5cf9ea31d73f4013f8e7b4bd). | |||
- Functional dependencies, inspired by database theory, let a programmer specify that some of the arguments to one of their type classes are entirely determined by the value of some other argument. If that's a bit abstract, a more operational reading is that functional dependencies improve inferred types by adding new equalities. The classic example is this:
```haskell | |||
class Collects c e | c -> e where | |||
singleton :: e -> c | |||
union :: c -> c -> c | |||
``` | |||
Without the functional dependency, the inferred type for the function `bagTwo`{.haskell} below would be `(Collects c e1, Collects c e2) => e1 -> e2 -> c`{.haskell}, implying that `bagTwo` is capable of placing two values of different types in the same collection `c`.
```haskell | |||
bagTwo x y = singleton x `union` singleton y | |||
``` | |||
With the functional dependency `c -> e` in place, the two inferred constraints `(Collects c e1, Collects c e2)` _interact_ to introduce an equality `e1 ~ e2`, improving the inferred type of the function to
```haskell | |||
bagTwo :: Collects c a => a -> a -> c | |||
``` | |||
Functional dependencies were first implemented in GHC in December 1999, in [this commit](https://gitlab.haskell.org/ghc/ghc/-/commit/297f714906efa8a76378c6fa6db3cd592f896749). The connection between database theory and type systems, integral in the design of functional dependencies for Haskell type classes, is made clear in [the original paper](http://web.cecs.pdx.edu/~mpj/pubs/fundeps-esop2000.pdf), section 5. | |||
- Type families, originally introduced as _associated types_, are, as [Richard Eisenberg put it](https://gitlab.haskell.org/ghc/ghc/-/issues/11080), "non-generative, non-injective symbols whose equational theory is given by GHC". Put another way, they're almost-but-not-quite functions between types. Type families are _weird_, and complicate type checking massively. For instance, consider the following program, taken from Stolarek et al.'s "Injective Type Families for Haskell":
```haskell | |||
class Manifold a where | |||
type Base a | |||
project :: a -> Base a | |||
unproject :: Base a -> a | |||
id :: Manifold a => Base a -> Base a | |||
id = project . unproject | |||
``` | |||
Does this program type check? Surprisingly, the answer is no! The reason is that the type variable `a` only appears under type families, and in the set of constraints, so GHC reports the function's type as ambiguous. | |||
To understand why this is problematic, imagine that we have two types `X`{.haskell} and `Y`{.haskell} such that `Base X = Base Y = [Double]`{.haskell}. Given a `vec :: [Double]`, what instance of `Manifold` should the call `id vec` use? We can't choose - we can only guess, and runtime behaviour that depends on a compiler guess is very much frowned upon! (The fix the paper proposes is sketched right after this list.)
Type families were originally implemented ca. 2006, but I've been unable to track down the precise commit. I believe it was done as part of the patch which changed GHC's intermediate representation to System $F_C$ (we'll get to it) - this is backed up by this sentence from the conclusion of the $F_C$ paper: "At the same time, we re-implemented GHC’s support for newtypes and GADTs to work as outlined in §2 and added support for associated (data) types". | |||
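(As promised: the fix Stolarek et al. propose is an _injectivity annotation_, which eventually shipped in GHC 8.0 as `TypeFamilyDependencies`. A sketch:)
```haskell
{-# LANGUAGE TypeFamilyDependencies #-}

class Manifold a where
  -- "= r | r -> a" declares Base injective: the result determines
  -- the argument, which makes the type of id unambiguous again.
  type Base a = r | r -> a
  project :: a -> Base a
  unproject :: Base a -> a
```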
All of these features interact with each other in entirely non-trivial ways, creating a powerful source of GHC infelicities with $n^2$ magnitude. The interaction between GADTs and type families, for instance, mandates an elaboration algorithm which can cope with _local assumptions_ in a principled way, since GADTs can introduce equalities between existentials which interact with type family axioms non-trivially. Wobbly types just won't cut it.
That's where $\operatorname{OutsideIn}$ comes in - or, more specifically, $\operatorname{OutsideIn}(X)$, since the elaboration algorithm is parametrised over the constraint domain $X$. This post is intended as a companion to [the JFP paper introducing OutsideIn](https://www.microsoft.com/en-us/research/wp-content/uploads/2016/02/jfp-outsidein.pdf), not as a replacement. The core idea is that we can record where the local assumptions are introduced in a tree of _implication constraints_, built out of the constraints in our domain $X$, and these can be reduced - outside-in - to an $X$-specific solver. | |||
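To make "tree of implication constraints" slightly less abstract before we dive in, here's a sketch of the shape such a constraint language takes - the constructors are hypothetical, with `Name` and `Type` standing in for whatever representations we settle on later:
```haskell
-- A sketch of the constraint domain X, plus the implications that
-- record where local assumptions come into scope.
data Constraint
  = Equality Type Type       -- τ₁ ~ τ₂
  | HasClass Name [Type]     -- C τ₁ … τₙ
  | Implication
      { skolems :: [Name]       -- variables bound by the implication
      , givens  :: [Constraint] -- local assumptions (GADT equalities)
      , wanteds :: [Constraint] -- constraints to solve under them
      }
```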
Diverging from the paper slightly, I'll implement the elaborator as a _bidirectional_ algorithm, which lets us take advantage of programmer-written type signatures. The signatures are there for a reason! It's silly to use type signatures as a source of complication (infer a type for the binding, then match it against the signature) rather than as a source of _simplification_. Plus - bidirectional type checking makes higher-rank types almost trivial - I think we can all agree that's a good thing, yeah? | |||
# The Problem Statement | |||
We're given a Haskell program - well, a program written in a proper subset of a proper superset of Haskell - and we want to tell whether it's type correct. Our superset extends Haskell 2010 to feature type families, GADTs, rank-N types and functional dependencies, but our subset doesn't contain most of Haskell's niceties, like definitions by equations, guards, or even `if`{.haskell}: you get `case`{.haskell}, and _you're going to like it_. | |||
Well, more than just telling whether or not the program is type correct, we want to produce an _elaborated program_ in a simpler language - GHC calls this "Core" - if and only if the program is correct, and report a (set of) good type errors otherwise. The elaborated program also has to be type correct, and, ideally, we have a _second_, much smaller type checker over the Core language that calls the big, complicated elaborator out on its bullshit. Because of this, the elaborator has to produce _evidence_ justifying its wilder claims. | |||
There are two kinds of evidence we need to produce: _coercions_ are inserted where the expected type of an expression is equal to its actual type in a non-trivial way. Consider the program below, and its elaboration to the right: | |||
<div class=mathpar> | |||
```haskell | |||
data T1 a where | |||
TI :: T1 Int | |||
TB :: T1 Bool | |||
foo :: T1 a -> a | |||
foo x = case x of | |||
TI -> 123 | |||
TB -> True | |||
``` | |||
```haskell | |||
data T1 a where | |||
TI :: (a ~# Int) => T1 a | |||
TB :: (a ~# Bool) => T1 a | |||
foo :: T1 a -> a | |||
foo x = case x of | |||
TI phi -> 123 |> Sym phi | |||
TB phi -> True |> Sym phi | |||
``` | |||
</div> | |||
This program, which uses GADTs (see `data ... where`{.haskell}), has two non-trivial equalities between types. In the `TI -> 123`{.haskell} case, we used an `Int`{.haskell}[^1] literal where a value of type `a` was expected. But in that branch, `a` is equal to `Int`{.haskell}! In the elaborated output, this non-trivial local equality is explicitly witnessed by a _coercion variable_ `phi :: a ~# Int`{.haskell}, and the use of `123 :: Int`{.haskell} at type `a` has to be mediated by a _cast_. | |||
[^1]: Actually, if you know how numeric literals desugar, you might know the actual elaboration produced here is different: `123` becomes `fromInteger @a ($NumInt |> Num (Sym phi)) (123 :: Integer)`{.haskell}. This is because it's totally legit to cast the `Num Int`{.haskell} dictionary to a `Num a`{.haskell} dictionary using the local equality, and, since `123`{.haskell} is sugar for `fromInteger @α (123 :: Integer)`{.haskell}, `α` gets solved to `a`, not `Int`{.haskell}. | |||
The other kind of evidence is not specific to GADTs, type families, or any other type fanciness: _dictionaries_ witness the existence of a type class `instance`{.haskell}, but, unlike coercions (which only exist to make the second type checker happy), exist at runtime. Consider the program below and its elaboration: | |||
<div class=mathpar> | |||
```haskell | |||
class S a where | |||
s :: a -> String | |||
instance S Int where | |||
s = show | |||
foo :: Int -> String | |||
foo x = s (x + 123) | |||
``` | |||
```haskell | |||
data $S a = | |||
$MkS { s :: a -> String } | |||
$dSInt :: S Int | |||
$dSInt = $MkS @Int (show @Int $dShowInt) | |||
foo :: Int -> String | |||
foo x = s @Int $dSInt ((+) @Int $dNumInt x 123) | |||
``` | |||
</div> | |||
Type `class`{.haskell}es are elaborated to `data`{.haskell} types, and `instance`{.haskell}s are compiled to actual proper values of those data types. When you apply a function with overloaded type - like `s`, `show` and `(+)` - the compiler inserts the value corresponding to the `instance`{.haskell} that was selected to satisfy the class constraint. Further, `instance`{.haskell}s with superclass constraints become functions from dictionaries to dictionaries, and superclasses on `class`{.haskell}es become values embedded in the dictionary, just like class methods. | |||
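Those last two points deserve a tiny example. A sketch, reusing the `$`-naming convention from above - the names are invented, not GHC's actual scheme, and `$dEqList` is assumed to be the elaborated `Eq [a]`{.haskell} instance:
```haskell
class Eq a => Ord' a where
  cmp :: a -> a -> Ordering

-- the superclass is stored in the dictionary, just like a method:
data $Ord' a =
  $MkOrd' { $p1Ord' :: $Eq a
          , cmp     :: a -> a -> Ordering }

-- an instance with a context, like `instance Ord' a => Ord' [a]`,
-- becomes a function from dictionaries to dictionaries:
$dOrdList :: $Ord' a -> $Ord' [a]
$dOrdList d = $MkOrd' ($dEqList ($p1Ord' d)) (\xs ys -> ...)
```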
You'll also notice another artifact of elaboration in the `foo` example above: the use of `s` at type `Int`{.haskell} became a _visible type application_ `s @Int`{.haskell}. This is, again, to satisfy the second type checker, but it can in principle be used as an actual implementation of polymorphism - one that doesn't box. See [Sixten](https://github.com/ollef/sixten) for a language that exploits this type passing to implement polymorphism without monomorphisation. Type applications are used in every polymorphic function application, not just those with class constraints.
## Why it's hard | |||
GADTs complicate the problem of type inference in a way that's become rather famous: GADTs destroy the _principal types_ property. Recall: A **principal type** for a function $f$ is a type $\tau$ such that $\Gamma \vdash f : \tau$, and, if $\Gamma \vdash f : \sigma$, then $\sigma$ is a substitution instance of $\tau$. Using less KaTeX, a principal type for a function is a _most general type_ for that function. For instance, the functions below are annotated with their principal types:
```haskell | |||
id :: a -> a | |||
id x = x | |||
const :: a -> b -> a | |||
const x _ = x | |||
``` | |||
But now consider this program using GADTs: | |||
```haskell | |||
data T a where | |||
T1 :: Int -> T Bool | |||
T2 :: T a | |||
test x y = case x of | |||
T1 n -> n > 0 | |||
T2 -> y | |||
``` | |||
One can verify - and we will - that the function test types as either `test :: forall a. T a -> Bool -> Bool`{.haskell} or as `forall a. T a -> a -> a`{.haskell}, but neither of these types is an instance of the other! Let's look at why `test` checks with either of those types, with a _lot_ of detail - mimicking by hand the execution of the algorithm. Don't worry about all the terms I'll be throwing around: they'll all be explained later, I promise! | |||
<details class=blockquote> | |||
<summary> **`test :: forall a. T a -> Bool -> Bool`{.haskell}** </summary> | |||
The algorithm is in checking mode, since we have a type signature. | |||
1. Introduce a binding `x :: T a`{.haskell} into scope. We must check the body of the function against the type `Bool -> Bool`{.haskell} | |||
2. Introduce a binding `y :: Bool` into scope. We must check the body of the function against the type `Bool`{.haskell}. | |||
3. Check the `case`{.haskell} expression against the type `Bool`{.haskell}. There are two branches. | |||
* `T1 n -> n > 0`{.haskell}: | |||
* Instantiate the type of the constructor `T1 :: forall a. (a ~ Bool) => Int -> T a`{.haskell} at the scrutinee's type - that is, with `a := a`{.haskell} - to get `T1 :: (a ~ Bool) => Int -> T a`{.haskell}. Here `a` is a _skolem_ type variable, not a unification variable: it was bound by the function's type signature, so the match may learn facts _about_ it, but may not solve it.
* Introduce the local equality assumption `phi :: a ~ Bool`{.haskell} and the variable `n :: Int`{.haskell}. | |||
* Check that `n > 0 :: Bool`{.haskell}. For brevity, we'll take this to be one atomic step, which succeeds, but the real algorithm must treat all of those subexpressions independently. | |||
* `T2 -> y`{.haskell}. We must check that `y :: Bool`{.haskell}, which succeeds. | |||
Since all of these steps succeed (most of them are introducing variables and can't fail) - the program is type-correct. Note that in the branch with a local equality, our assumption that `a ~ Bool`{.haskell} wasn't used. | |||
</details> | |||
<details class=blockquote> | |||
<summary> **`test :: forall a. T a -> a -> a`{.haskell}** </summary> | |||
The algorithm is in checking mode, since we have a type signature. | |||
1. Introduce a binding `x :: T a`{.haskell} into scope. We must check the body of the function against the type `a -> a`{.haskell} | |||
2. Introduce a binding `y :: a` into scope. We must check the body of the function against the type `a`{.haskell}. | |||
3. Check the `case`{.haskell} expression against the type `a`{.haskell}. There are two branches.
* `T1 n -> n > 0`{.haskell}: | |||
* Instantiate the type of the constructor `T1 :: forall a. (a ~ Bool) => Int -> T a`{.haskell} at the scrutinee's type - that is, with `a := a`{.haskell} - to get `T1 :: (a ~ Bool) => Int -> T a`{.haskell}. Here `a` is a _skolem_ type variable, not a unification variable: it was bound by the function's type signature, so the match may learn facts _about_ it, but may not solve it.
* Introduce the local equality assumption `phi :: a ~ Bool`{.haskell} and the variable `n :: Int`{.haskell}. | |||
* Check that `n > 0 :: a`{.haskell}. We infer that `n > 0 :: Bool`{.haskell}, and we must unify `Bool ~ a`{.haskell}. This unification succeeds because of the given equality `phi :: a ~ Bool`{.haskell}, which we are free to invert. | |||
* `T2 -> y`{.haskell}. We must check that `y :: a`{.haskell}, which succeeds. | |||
Since all of these steps succeed (most of them are introducing variables and can't fail) - the program is type-correct. In this typing, compared with the previous, we made use of the assumption `phi :: a ~ Bool` brought into scope by the match against the constructor `T1 n`. | |||
</details> | |||
The execution trace for both cases is remarkably similar - the only difference is that, when the function is typed as `T a -> a -> a`{.haskell}, we must make use of the local equality brought into scope to justify that we're allowed to use a value nominally of type `Bool`{.haskell} as one of type `a`. We are free to do this, but it's not obvious whether, without a type annotation to guide us, we should. Consider now the following very minor alteration to `test`:
```haskell | |||
test x y = case x of | |||
T1 n -> n > 0 | |||
T2 -> not y | |||
``` | |||
The only possible type for this program is `T a -> Bool -> Bool`{.haskell}, and so, we can decide without any major complications that the GADT equality should _not_ be used. | |||
# How To Check Types | |||
In this section we'll solve the infinitely simpler problem of elaborating a language with rank-N types and type classes - including functional dependencies - but crucially, no GADTs. To do this we'll use a _bidirectional_, _constraint-based_ elaboration algorithm. | |||
First, bidirectional means that, unlike in a type _inference_ system, type information flows both in and out of the algorithm. Practically speaking, we implement two functions: one for the case where type information is an input to the algorithm (`check`{.haskell}), and one where type information is an output of the algorithm (`infer`{.haskell}).
```haskell | |||
infer :: Raw.Expr -> Elab (Core.Term, Core.Type) | |||
check :: Raw.Expr -> Core.Type -> Elab Core.Term | |||
``` | |||
If you know how to infer a type for an expression `e` but you need to check it against a known type `wanted_type`, you can do it by unification, whereas if you know how to check an expression `f` against a type but you need to infer a type for it, you can do it by inventing a new _metavariable_ and checking against that[^2]: | |||
[^2]: In the OutsideIn(X) paper, metavariables are known as _unification variables_. The term _metavariable_ is common in the dependently-typed world, whereas _unification variable_ is more common among Haskell and ML researchers. | |||
<div class=mathpar> | |||
```haskell | |||
check e wanted_type = do | |||
  (elab, actual_type) <- infer e
unify wanted_type actual_type | |||
pure elab | |||
``` | |||
```haskell | |||
infer f = do | |||
ty <- newMeta | |||
elab <- check f ty | |||
pure (elab, ty) | |||
``` | |||
</div> | |||
Constraint-based means that, at least conceptually, the algorithm works by first generating constraints by walking through the AST (we do this bidirectionally), and only later solving the generated constraints. But, as a very fruitful optimisation, there are cases where the constraints need not be stashed away for later: If we want to solve a unification problem, for instance, where a metavariable is being compared against a concrete type, and we're free to solve the variable with that type, we might as well do it inline. | |||
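As a sketch of that inline shortcut - `MetaTy`, `solveMeta` and `defer` are hypothetical names, standing in for metavariable machinery we haven't defined, and `Equality` is the hypothetical constraint constructor from earlier:
```haskell
-- Solve eagerly when one side is a metavariable we're allowed to
-- fill, decompose when both sides have matching structure, and
-- stash anything else for the solver to deal with later.
unify :: Type -> Type -> Elab ()
unify (MetaTy m) t = solveMeta m t
unify t (MetaTy m) = solveMeta m t
unify (FunTy a b) (FunTy c d) = unify a c >> unify b d
unify a b
  | a == b    = pure ()
  | otherwise = defer (Equality a b)
```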
_Elaboration_ is a natural extension of "type checking" in which the program is both checked and transformed into a simpler intermediate representation in the same step. The name "type checker" sort-of implies that the output is a boolean (or, more realistically, a list of errors): this is rarely true in practice, but I still prefer the use of the name "elaborator" to make clear that the output is a different _language_ from the input, and not merely a type-annotated version of the input. | |||
I'm going to start by talking about the intermediate language we'll elaborate into, System $F_C$, first. This is because of an assumption I'm making: I'm assuming most of my readers are familiar with Haskell - at least in passing - but not very familiar with GHC's intermediate language. That's why we start there!
## Our Target Language | |||
System $F_C$, as the name kind-of sort-of implies, is a superset of System $F$, the _second-order_ lambda calculus. For those not in the loop, System F has all the same features of a normal typed lambda calculus (variables, lambda abstraction, application, algebraic data types, and pattern matching[^3]), but additionally features _first class polymorphism_. Roughly, this means that in System F, a `forall`{.haskell} type can appear everywhere a "normal" type can appear - you could form the type `[forall a. a -> a]`{.haskell} of "lists of identity functions", for instance. | |||
[^3]: If you disagree with the inclusion of algebraic data types and pattern matching in the list of features of a "normal typed lambda calculus"---there's nothing you can do about it, this is my blog, lol. | |||
Now, this doesn't mean that first class polymorphism is available to languages that elaborate into System $F_C$ - GHC, for instance, struggled with what they call "impredicative polymorphism" for years, up until very recently. Amulet did a slightly better job because, being a research toy and not a production compiler (that happens to also be a research toy), there was less code to move around when implementing support for first-class polymorphism.
Since `forall`{.haskell} is a new type former, it also has a corresponding introduction form and elimination form. The introduction rule says that if you can build a term `e :: t` in a context where `a` is a type variable of kind `k`, then the term `Λ (a :: k). e` has type `forall (a :: k). t`{.haskell}. To stick with ASCII for "control" symbols, I'm going to write `Λ (a :: k)` as `\ @(a :: k)`{.haskell}, omitting the kind if it is obvious - also, I'm sticking with Haskell notation, even if `::` should really mean cons.
Similarly, the elimination rule says that to consume an expression `e :: forall (a :: k). t`{.haskell}, what we need to do is come up with a _type_ `s :: k`{.haskell}. Then we can _instantiate_ (using a different word so as to not overload "apply") `e`{.haskell} at `s`{.haskell} to get a term `e @s :: t[s/a]`{.haskell} - where `t[s/a]` denotes the substitution of `s` for `a` in `t`, avoiding capture. | |||
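Concretely, here's the polymorphic identity function and one use of it in this notation:
```haskell
-- introduction: a type abstraction, in our ASCII syntax
id :: forall (a :: *). a -> a
id = \ @a (x :: a) -> x

-- elimination: instantiate at Int, then apply
five :: Int
five = id @Int 5
```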
Here's a simple Haskell program, and its translation into the notation I'll use for $F_C$. We'll go over it afterwards. | |||
<div class=mathpar> | |||
```haskell | |||
data List a | |||
= Nil | |||
| Cons a (List a) | |||
map :: (a -> b) -> List a -> List b
-- this line intentionally left blank | |||
map f (Cons x xs) = Cons (f x) (map f xs) | |||
map f Nil = Nil | |||
``` | |||
```haskell | |||
data List :: * -> * where | |||
Nil :: forall a. List a | |||
Cons :: forall a. a -> List a -> List a | |||
map :: forall a b. (a -> b) -> List a -> List b
map @a @b f x = case x of | |||
Cons x xs -> Cons @b (f x) (map @a @b f xs) | |||
Nil -> Nil @b | |||
``` | |||
</div> | |||
Let's go over the differences: | |||
* In Haskell, we allow datatype declarations using the Haskell 98 syntax, but in $F_C$ all data types are given in GADT syntax. Furthermore, `List`{.haskell} was given a kind annotation when it was elaborated - the kind of `List`{.haskell} says it maps ground types to ground types. By "ground type" I mean something that's potentially inhabited, e.g. `Int`{.haskell} or `Void`{.haskell}, but not `Maybe`. | |||
Where does the kind annotation come from? Well, we know `List` will have a function kind since it has one argument, and we know its return kind will be `*`{.haskell} since all data types are in `*`{.haskell}. That means we kind-check the constructors with `List :: κ -> *`{.haskell} in scope, where `κ` is a fresh metavariable. The type of `Nil`{.haskell} doesn't fix `κ`, but the type of `Cons`{.haskell} does - `a` is used on the left of an arrow, so it must have kind `*`, fixing `κ := *`.
* Haskell has definition by equations, but in $F_C$ we simply have type signatures and definitions. We can translate the equations into a case tree using a rather involved - but mechanical - process, and, to avoid that complication, the subset of Haskell our elaborator works on will not support equations. It's mostly immaterial to elaboration, anyway.
* In Haskell, the type signature `map :: (a -> b) -> List a -> List b`{.haskell} is written with implicit binders for the type variables `a` and `b`, so that they're seemingly free. This is not the case, of course, and so in $F_C$ we must write out what `forall`{.haskell}s we mean. This is less relevant in this case, where there are no free type variables in the environment, but specifying `forall`{.haskell}s is essential when we have `ScopedTypeVariables`. | |||
* Finally, all of the polymorphism implicit in the Haskell version of the program was made explicit in its elaboration into $F_C$. For instance, the type of the `map`{.haskell} function has two `forall`{.haskell}s, so its definition must begin with a corresponding number of `\@`{.haskell}s (which I moved onto the RHS for presentation purposes - don't want lines getting too wide). | |||
Similarly, the list `Cons`{.haskell}tructors were used as expressions of type `List a` in Haskell, but their $F_C$ types start with a `forall`{.haskell}, meaning we have to instantiate them - `Nil @b`{.haskell}, `Cons @b`{.haskell} - at the return type of the `map` function. | |||
We represent the language using a data type. Syntax productions in the language become constructors of our data type. For clarity of presentation, I'll use `Text`{.haskell}s for variable names. This is a bad idea, and it'll make a lot of you very angry - for good reason! Dealing with binders is _hard_, and using strings for identifiers is quite possibly the worst solution. It'd be more principled to use de Bruijn indices, or locally nameless, or something. But - that's a needless complication, so, in the interest of clarity, I'll just use strings. | |||
Since our language contains type applications, we "need" to define types before expressions. Well, this is a Haskell program, so we don't _need_ to - Haskell programs are not lists of definitions, but rather _directed graphs_ of definitions, so that source order doesn't matter - but for clarity, we define the type of types before the type of expressions. | |||
```haskell | |||
module Core where | |||
import qualified Data.Text as T | |||
import Data.Text (Text) | |||
data Kind | |||
= TypeKi | |||
-- ^ The kind '*' | |||
| ConstraintKi | |||
-- ^ The kind 'Constraint' | |||
| FunKi Kind Kind | |||
-- ^ κ → κ | |||
deriving (Eq, Show) | |||
data Type | |||
= VarTy Text Kind | |||
-- ^ Type variables α | |||
| AppTy Type Type | |||
-- ^ The type being applied is never a constructor, | |||
-- always another AppTy or a VarTy. | |||
| ConTy Text [Type] | |||
-- ^ Type constructor applied to some arguments. | |||
| ForAllTy Text Kind Type | |||
-- ^ Polymorphic types | |||
| FunTy Type Type | |||
-- ^ Function types | |||
deriving (Eq, Show) | |||
``` | |||
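Now that `Type`{.haskell} is in hand, we can sketch the substitution `t[s/a]` from earlier. `substTy` is a hypothetical helper and, true to the strings-for-binders warning above, it punts on capture-avoidance:
```haskell
-- Substitute the type s for the variable named n. A sketch: a real
-- implementation would rename binders to avoid capture.
substTy :: Text -> Type -> Type -> Type
substTy n s ty = case ty of
  VarTy n' _ | n' == n -> s
  VarTy{}              -> ty
  AppTy f x            -> AppTy (substTy n s f) (substTy n s x)
  ConTy c as           -> ConTy c (map (substTy n s) as)
  FunTy a b            -> FunTy (substTy n s a) (substTy n s b)
  ForAllTy v k t
    | v == n           -> ty -- shadowed: stop substituting
    | otherwise        -> ForAllTy v k (substTy n s t)
```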
Throughout the language, variables (resp. type variables) are annotated with the type (resp. kind) with which they are introduced. Moreover, our type of expressions unifies `\ @a`{.haskell} and `\ x`{.haskell}, as well as both application forms, by delegating to `Binder`{.haskell} and `Arg`{.haskell} types.
```{.haskell style="padding-bottom: 0"} | |||
data Binder = TypeBinder Text | ExprBinder Text | |||
deriving (Eq, Show) | |||
data Arg = TypeArg Type | ExprArg Expr | |||
deriving (Eq, Show) | |||
data Expr | |||
= Var Text Type | |||
| App Expr Arg | |||
| Lam Binder Expr | |||
-- continues | |||
``` | |||
For `Let`{.haskell}, we introduce yet another auxiliary type. A `Bind`{.haskell} represents a _binding group_, a group of mutually recursive definitions. Binding groups do not correspond 1:1 with `let`{.haskell}s in Haskell, for instance, the Haskell program on the left is elaborated into the Core expression on the right: | |||
<div class="mathpar"> | |||
```haskell | |||
let quux x = bar (x - 1) | |||
foo = 1 | |||
bar x = quux x + foo | |||
in foo | |||
``` | |||
```haskell | |||
Let [NonRec "foo" (Lit 1)] $
  Let [Rec [ ("quux", Lam (ExprBinder ...) ...)
           , ("bar",  Lam (ExprBinder ...) ...) ]] $
    Var "foo"
``` | |||
</div> | |||
As you can probably imagine, the way I arrived at this definition involves... graphs. Yes, it's unfortunate, but it's the only way to correctly describe how Haskell declaration blocks - and that includes the top level - are type checked. The Haskell report mandates that declaration groups - in the top level, a `let`{.haskell} expression or a `where`{.haskell} clause - be sorted into strongly connected components, and type-checked in dependency order. Each of these strongly connected components becomes a `Rec`{.haskell} binding! (There's a sketch of the sorting after the `Bind`{.haskell} definition below.)
We define the auxiliary `Bind`{.haskell} type... somewhere else, since we still have cases to add to `Expr`{.haskell}. It's either a connected graph of mutually recursive binders, containing a list of pairs of names and expressions, or a single binder - in which case we unpack the pair.
```{.haskell style="padding-top: 0; padding-bottom: 0;"} | |||
-- continued | |||
| Let [Bind] Expr | |||
data Bind | |||
= NonRec Text Expr | |||
| Rec [(Text, Expr)] | |||
deriving (Eq, Show) | |||
-- continues | |||
``` |
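Speaking of graphs: here's a sketch of how those binding groups can be computed, using `Data.Graph` from the containers package. The `freeVars` helper - returning the names an expression mentions - is assumed, not something we've defined:
```haskell
import Data.Graph (SCC(..), stronglyConnComp)

-- Sort a flat list of definitions into binding groups, dependencies
-- first. Self- and mutually-recursive definitions come out as Rec.
toBindGroups :: [(Text, Expr)] -> [Bind]
toBindGroups defs = map toBind (stronglyConnComp nodes) where
  bound = map fst defs
  nodes = [ (def, name, filter (`elem` bound) (freeVars expr))
          | def@(name, expr) <- defs ]
  toBind (AcyclicSCC (name, expr)) = NonRec name expr
  toBind (CyclicSCC binds)         = Rec binds
```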
@ -2,6 +2,9 @@ | |||
{-# LANGUAGE MultiWayIf #-} | |||
{-# LANGUAGE OverloadedStrings #-} | |||
{-# LANGUAGE BlockArguments #-} | |||
{-# LANGUAGE LambdaCase #-} | |||
{-# LANGUAGE StandaloneDeriving #-} | |||
{-# LANGUAGE DeriveAnyClass #-} | |||
import Control.DeepSeq (rnf) | |||
import Control.Concurrent | |||
@ -49,6 +52,9 @@ import Data.IORef | |||
import Data.Hashable (Hashable (hashWithSalt)) | |||
import GHC.Stack | |||
import Text.Read (readMaybe) | |||
import GHC.Show (showCommaSpace) | |||
import Data.Traversable | |||
import qualified Data.Text.Lazy as LT | |||
readerOpts :: ReaderOptions | |||
readerOpts = def { readerExtensions = pandocExtensions | |||
@ -71,11 +77,11 @@ writerOptions = do | |||
rssfeed :: FeedConfiguration | |||
rssfeed | |||
= FeedConfiguration { feedTitle = "Abigail's Blag: Latest articles" | |||
= FeedConfiguration { feedTitle = "Amelia's Blag: Latest articles" | |||
, feedDescription = "" | |||
, feedAuthorName = "Abigail Magalhães" | |||
, feedAuthorEmail = "m[email protected]" | |||
, feedRoot = "https://abby.how" | |||
, feedAuthorName = "Amélia" | |||
, feedAuthorEmail = "m[email protected]" | |||
, feedRoot = "https://amelia.how" | |||
} | |||
conf :: Configuration | |||
@ -84,10 +90,9 @@ conf = def { destinationDirectory = ".site" | |||
, tmpDirectory = ".store/tmp" | |||
, deployCommand = "./sync" } | |||
katexFilter :: IORef KatexCache -> Pandoc -> Compiler Pandoc | |||
katexFilter :: MVar KatexCache -> Pandoc -> Compiler Pandoc | |||
katexFilter cacheVar (Pandoc meta doc) = | |||
do | |||
initCache <- unsafeCompiler (readIORef cacheVar) | |||
id <- compilerUnderlying <$> compilerAsk | |||
t <- getMetadata id | |||
@ -108,7 +113,7 @@ katexFilter cacheVar (Pandoc meta doc) = | |||
go :: String -> [String] -> Inline -> Compiler Inline | |||
go id abbrevs (Math kind math) = unsafeCompiler $ do | |||
cache <- readIORef cacheVar | |||
cache <- readMVar cacheVar | |||
case HMap.lookup (id, kind, math) (spanMap cache) of | |||
Just x -> pure (RawInline "html" x) | |||
Nothing -> do | |||
@ -117,7 +122,7 @@ katexFilter cacheVar (Pandoc meta doc) = | |||
(contents, _) <- readProcessBS "node_modules/.bin/katex" args . BS.fromStrict . T.encodeUtf8 $ math | |||
let text = T.init . T.init . T.decodeUtf8 . BS.toStrict $ contents | |||
atomicModifyIORef' cacheVar (\m -> (bumpCacheEntry cache id abbrevs kind text math, ())) | |||
modifyMVar cacheVar (\m -> pure (bumpCacheEntry cache id abbrevs kind text math, ())) | |||
pure $ RawInline "html" text | |||
go id _ x = pure x | |||
@ -164,15 +169,6 @@ abbreviationFilter (Pandoc meta doc) = | |||
in pure (name, rest) | |||
| otherwise = Nothing | |||
estimateReadingTime :: Pandoc -> Pandoc | |||
estimateReadingTime (Pandoc meta doc) = Pandoc meta doc' where | |||
wordCount = T.pack (show (getSum (query inlineLen doc))) | |||
inlineLen (Str s) = Sum (length (T.words s)) | |||
inlineLen _ = mempty | |||
doc' = RawBlock "html" ("<span id=reading-length>" <> wordCount <> "</span>") | |||
: doc | |||
addLanguageTag :: Pandoc -> Pandoc | |||
addLanguageTag (Pandoc meta blocks) = Pandoc meta (map go blocks) where | |||
@ -183,15 +179,20 @@ addLanguageTag (Pandoc meta blocks) = Pandoc meta (map go blocks) where | |||
, "code-container":if haskv then "custom-tag":classes' else classes' | |||
, [] | |||
) | |||
[block, Div (mempty, ["code-tag"], []) [Plain [Span (mempty, [], []) [Str tag]]]] | |||
$ [block] ++ maybe [Div (mempty, ["code-tag"], []) [Plain [Span (mempty, [], []) [Str tag]]]] | |||
where | |||
language' = case T.uncons language of | |||
Nothing -> mempty | |||
Just (c, cs) -> T.cons (toUpper c) cs | |||
tag = fromMaybe language' (lookup "tag" kv) | |||
haskv = fromMaybe False (True <$ lookup "tag" kv) | |||
maybe | |||
| "notag" `elem` classes' = const [] | |||
| otherwise = id | |||
go block@(CodeBlock (identifier, [], kv) text) = Div (mempty, ["code-container"], []) [block] | |||
go block@(CodeBlock (identifier, [], kv) text) = | |||
Div (mempty, ["code-container"], []) | |||
[block, Div (mempty, ["empty-code-tag"], []) []] | |||
go x = x | |||
saveSynopsys :: Pandoc -> Compiler Pandoc | |||
@ -202,7 +203,7 @@ saveSynopsys (Pandoc meta doc) = | |||
case dropWhile (not . isParagraph) doc of | |||
p:ps -> do | |||
saveSnapshot "synopsys" =<< makeItem (take n (p:ps)) | |||
saveSnapshot "synopsys-block" =<< makeItem (map removeFootnotes (take n (p:ps))) | |||
pure () | |||
[] -> pure () | |||
pure $ Pandoc meta doc | |||
@ -210,31 +211,66 @@ saveSynopsys (Pandoc meta doc) = | |||
isParagraph Para{} = True | |||
isParagraph _ = False | |||
sassImporter :: SassImporter | |||
sassImporter = SassImporter 0 go where | |||
go "normalize" _ = do | |||
c <- readFile "node_modules/normalize.css/normalize.css" | |||
pure [ SassImport { importPath = Nothing | |||
, importAbsolutePath = Nothing | |||
, importSource = Just c | |||
, importSourceMap = Nothing | |||
} ] | |||
go _ _ = pure [] | |||
setup :: IO (IORef KatexCache) | |||
removeFootnotes (Para xs) = Para $ filter (\case { Note _ -> False; _ -> True }) xs | |||
removeFootnotes x = x | |||
saveWordCount :: Pandoc -> Compiler Pandoc | |||
saveWordCount (Pandoc meta doc) = | |||
do | |||
saveSnapshot "wc" =<< makeItem wordCount | |||
pure $ Pandoc meta doc | |||
where | |||
wordCount = show (getSum (query inlineLen doc)) | |||
inlineLen (Str s) = Sum (length (T.words s)) | |||
inlineLen _ = mempty | |||
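-- Illustrative sketch: 'query' (Text.Pandoc.Walk) folds a monoid over every
-- Inline in the document, so the count falls out of a plain Sum, e.g.
--
--   >>> getSum (query inlineLen [Plain [Str "hello world", Space, Str "hi"]])
--   3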
saveTableOfContents :: Pandoc -> Compiler Pandoc | |||
saveTableOfContents (Pandoc meta input) = | |||
do | |||
saveSnapshot "table-of-contents" =<< makeItem toc | |||
pure $ Pandoc meta doc
where | |||
headers = filter (\case { Header _ _ _ -> True; _ -> False }) doc | |||
doc = fixHeaders 0 input | |||
fixHeaders n (Header l (_, ms, mt) x:bs) = | |||
Header l (anchor, ms, mt) (Link (anchor, ms, mt) [] (T.singleton '#' <> anchor, mempty):x):fixHeaders (n + 1) bs where | |||
anchor = T.pack ("h" ++ show n) | |||
fixHeaders k (x:bs) = x:fixHeaders k bs | |||
fixHeaders _ [] = [] | |||
into :: [Block] -> [[Block]] | |||
into (Header l m@(anchor, _, _) x:ms) = | |||
let | |||
contained (Header l' _ _) = l' > l | |||
contained _ = undefined | |||
(ours, rest) = span contained ms | |||
in [Para [Link (mempty, mempty, mempty) (tail x) (T.singleton '#' <> anchor, mempty)], list (into ours)]:into rest | |||
into [] = [] | |||
into _ = undefined | |||
list = BulletList | |||
toc :: Block | |||
toc = list (into headers) | |||
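-- Illustrative sketch: 'into' pairs each header with the strictly deeper
-- headers that follow it, so (writing headers by level/anchor)
--
--   [Header 1 "a", Header 2 "b", Header 2 "c", Header 1 "d"]
--
-- nests roughly as
--
--   BulletList [ [a, BulletList [[b, ...], [c, ...]]], [d, ...] ]
--
-- This relies on the filtered 'headers' list containing only Header blocks;
-- anything else would hit the 'undefined' branches by construction.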
setup :: IO (MVar KatexCache) | |||
setup = do | |||
setEnv "AMC_LIBRARY_PATH" "/usr/lib/amuletml/lib/" | |||
loadCache | |||
compiler :: IORef KatexCache -> Compiler (Item String) | |||
compiler :: MVar KatexCache -> Compiler (Item String) | |||
compiler katexCache = do | |||
wops <- writerOptions | |||
pandocCompilerWithTransformM readerOpts wops $ | |||
katexFilter katexCache | |||
>=> abbreviationFilter | |||
>=> saveSynopsys | |||
>=> pure . estimateReadingTime | |||
>=> pure . addLanguageTag | |||
>=> saveWordCount | |||
>=> saveTableOfContents | |||
main :: IO () | |||
main = setup >>= \katexCache -> hakyllWith conf $ do | |||
@ -256,9 +292,21 @@ main = setup >>= \katexCache -> hakyllWith conf $ do | |||
match "css/*.scss" $ do | |||
route $ setExtension "css" | |||
compile $ sassCompilerWith def { sassOutputStyle = SassStyleCompressed | |||
, sassImporters = Just [ sassImporter ] | |||
} | |||
compile $ do | |||
imports <- unsafeCompiler $ newMVar ([] :: [(String, String)]) | |||
let add f p = modifyMVar imports (\x -> pure ((f, p):x, [])) | |||
body <- sassCompilerWith def | |||
{ sassOutputStyle = SassStyleCompressed | |||
, sassImporters = Just [ SassImporter 0 add ] | |||
} | |||
list <- unsafeCompiler $ takeMVar imports | |||
for list $ \(req, path) -> do | |||
load (fromFilePath ("css/" ++ reverse (dropWhile (/= '.') (reverse req)) ++ "scss")) | |||
:: Compiler (Item String) | |||
pure body | |||
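-- Reading of the new code (not from the commit message): the extra 'load'
-- per recorded import registers the partial as a Hakyll dependency, so
-- editing an @import'ed file invalidates the compiled stylesheet. One
-- caveat: the rewrite
--
--   reverse (dropWhile (/= '.') (reverse req)) ++ "scss"
--
-- assumes the import string carries an extension; a bare  @import "vars"
-- would reduce to "css/scss".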
match "diagrams/**/*.tex" $ do | |||
route $ setExtension "svg" | |||
@ -281,7 +329,11 @@ main = setup >>= \katexCache -> hakyllWith conf $ do | |||
>>= loadAndApplyTemplate "templates/default.html" postCtx | |||
>>= relativizeUrls | |||
loadSnapshot id "synopsys" >>= saveSnapshot "synopsys" . writePandocWith wops . fmap (Pandoc mempty) | |||
loadSnapshot id "synopsys-block" | |||
>>= saveSnapshot "synopsys-text" | |||
. writePandocWith wops | |||
. fmap (Pandoc mempty) | |||
pure r | |||
match "pages/posts/*.lhs" $ version "raw" $ do | |||
@ -317,6 +369,7 @@ main = setup >>= \katexCache -> hakyllWith conf $ do | |||
match "pages/*.md" $ do | |||
route $ setExtension "html" <> gsubRoute "pages/" (const "") | |||
compile $ compiler katexCache | |||
>>= loadAndApplyTemplate "templates/page.html" defaultContext | |||
>>= loadAndApplyTemplate "templates/default.html" defaultContext | |||
>>= relativizeUrls | |||
@ -324,7 +377,7 @@ main = setup >>= \katexCache -> hakyllWith conf $ do | |||
path <- toFilePath <$> getUnderlying | |||
contents <- itemBody <$> getResourceBody | |||
debugCompiler ("Loaded syntax definition from " ++ show path) | |||
res <- unsafeCompiler (Sky.parseSyntaxDefinitionFromString path contents) | |||
let res = Sky.parseSyntaxDefinitionFromText path (LT.pack contents) | |||
_ <- saveSnapshot "syntax" =<< case res of | |||
Left e -> fail e | |||
Right x -> makeItem x | |||
@ -336,7 +389,7 @@ main = setup >>= \katexCache -> hakyllWith conf $ do | |||
route idRoute | |||
compile $ do | |||
let feedCtx = postCtx <> bodyField "description" | |||
posts <- fmap (take 10) . recentFirst =<< onlyPublic =<< loadAllSnapshots ("pages/posts/*" .&&. hasNoVersion) "synopsys" | |||
posts <- fmap (take 10) . recentFirst =<< onlyPublic =<< loadAllSnapshots ("pages/posts/*" .&&. hasNoVersion) "synopsys-text" | |||
renderRss rssfeed feedCtx posts | |||
onlyPublic :: [Item String] -> Compiler [Item String] | |||
@ -350,12 +403,19 @@ onlyPublic = filterM isPublic where | |||
postCtx :: Context String | |||
postCtx = | |||
dateField "date" "%B %e, %Y" | |||
<> synopsysField | |||
<> snapshotField "synopsys" "synopsys-text" | |||
<> snapshotField "words" "wc" | |||
<> snapshotField' render "toc" "table-of-contents" | |||
<> defaultContext | |||
where | |||
synopsysField = field "synopsys" $ \x -> do | |||
snapshotField = snapshotField' pure | |||
snapshotField' f key snap = field key $ \x -> do | |||
let id = itemIdentifier x | |||
itemBody <$> loadSnapshot id "synopsys" | |||
fmap itemBody . f =<< loadSnapshot id snap | |||
render x = do | |||
wops <- writerOptions | |||
pure . writePandocWith wops . fmap (Pandoc mempty . pure) $ x | |||
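-- Illustrative sketch (markup hypothetical): with these fields in the
-- context, a post template can interpolate the snapshots directly, e.g.
--
--   <span>$words$ words</span>
--   <nav>$toc$</nav>
--   $synopsys$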
readProcessBS :: FilePath -> [String] -> BS.ByteString -> IO (BS.ByteString, String) | |||
readProcessBS path args input = | |||
@ -428,7 +488,7 @@ pathFromTitle meta = | |||
foldMapM :: (Monad w, Monoid m, Foldable f) => (a -> w m) -> f a -> w m | |||
foldMapM k = foldr (\x y -> do { m <- k x; (m <>) <$> y }) (pure mempty) | |||
loadCache :: HasCallStack => IO (IORef KatexCache) | |||
loadCache :: HasCallStack => IO (MVar KatexCache) | |||
loadCache = do | |||
t <- doesFileExist ".katex_cache" | |||
let fixup (a, b, c) = KatexCache (HMap.fromList a) (HMap.fromList b) (HMap.fromList c) | |||
@ -436,16 +496,16 @@ loadCache = do | |||
then (fixup <$> decodeFile ".katex_cache") `catch` \e -> | |||
const (print e *> pure (KatexCache mempty mempty mempty)) (e :: SomeException) | |||
else pure (KatexCache mempty mempty mempty) | |||
var <- newIORef map | |||
var <- newMVar map | |||
pure var | |||
flushCache :: IORef KatexCache -> IO () | |||
flushCache :: MVar KatexCache -> IO () | |||
flushCache var = do | |||
KatexCache x y z <- readIORef var | |||
Data.Binary.encodeFile ".katex_cache" (HMap.toList x, HMap.toList y, HMap.toList z) | |||
withMVar var $ \(KatexCache x y z) -> do | |||
Data.Binary.encodeFile ".katex_cache" (HMap.toList x, HMap.toList y, HMap.toList z) | |||
invalidateCache :: Identifier -> [String] -> IORef KatexCache -> Compiler KatexCache | |||
invalidateCache id abbrevs cacheVar = unsafeCompiler $ atomicModifyIORef' cacheVar (\x -> (go x, go x)) where | |||
invalidateCache :: Identifier -> [String] -> MVar KatexCache -> Compiler KatexCache | |||
invalidateCache id abbrevs cacheVar = unsafeCompiler $ modifyMVar cacheVar (\x -> let x' = go x in pure (x', x')) where
currentValues = map (\x -> (T.pack (takeWhile (/= ':') (tail x)), T.pack (tail (dropWhile (/= ':') (tail x))))) | |||
$ filter (not . isPrefixOf "-") abbrevs | |||
ident = show id | |||
@ -470,4 +530,25 @@ data KatexCache | |||
instance Hashable MathType where | |||
hashWithSalt s DisplayMath = hashWithSalt s (0 :: Int) | |||
hashWithSalt s InlineMath = hashWithSalt s (1 :: Int)
deriving instance Binary Block | |||
deriving instance Binary Inline | |||
deriving instance Binary Format | |||
deriving instance Binary ListNumberStyle | |||
deriving instance Binary ListNumberDelim | |||
deriving instance Binary Caption | |||
deriving instance Binary Alignment | |||
deriving instance Binary ColWidth | |||
deriving instance Binary TableHead | |||
deriving instance Binary TableBody | |||
deriving instance Binary TableFoot | |||
deriving instance Binary QuoteType | |||
deriving instance Binary Citation | |||
deriving instance Binary Row | |||
deriving instance Binary MathType | |||
deriving instance Binary RowHeadColumns | |||
deriving instance Binary CitationMode | |||
deriving instance Binary Cell | |||
deriving instance Binary RowSpan | |||
deriving instance Binary ColSpan |
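-- Note (assumption, not shown in the diff): these orphan Binary instances
-- lean on the Generic instances that pandoc-types provides, so the module
-- presumably enables StandaloneDeriving plus DeriveAnyClass (or equivalent)
-- for the generic default methods to kick in.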
@ -1,6 +1,5 @@ | |||
resolver: lts-16.20 | |||
resolver: lts-18.16 | |||
extra-deps: | |||
- hakyll-4.15.1.0@sha256:df90fdb1e7e09e90557ad23a6d402ce042f820f690595e7e73db82220edcadcf,9924 | |||
- hakyll-sass-0.2.4@sha256:4d2fbd8b63f5ef15483fc6dda09e10eefd63b4f873ad38dec2a3607eb504a8ba,939 | |||
- hsass-0.8.0@sha256:05fb3d435dbdf9f66a98db4e1ee57a313170a677e52ab3a5a05ced1fc42b0834,2899 | |||
- hlibsass-0.1.10.1@sha256:08db56c633e9a83a642d8ea57dffa93112b092d05bf8f3b07491cfee9ee0dfa5,2565 |
@ -1,10 +0,0 @@ | |||
body{color:black;font-size:16px;margin:0px auto 0px | |||
auto;width:80%;font-family:Fira Sans, sans-serif}div#header{border-bottom:2px | |||
solid black;margin-bottom:30px;padding:12px 0px 12px 0px}div#logo | |||
a{color:black;float:left;font-size:18px;font-weight:bold;font-family:Iosevka, | |||
Iosevka Term, Fira Mono, Inconsolata, monospace;text-decoration:none}div > | |||
span.kw{color:#007020}div > span.type{color:#902000}div#header | |||
#navigation{text-align:right}div#header #navigation | |||
a{color:black;font-size:18px;font-weight:bold;margin-left:12px;text-decoration:none;text-transform:uppercase}div#footer{border-top:solid | |||
2px black;color:#555;font-size:12px;margin-top:30px;padding:12px 0px 12px | |||
0px;text-align:right}div#content{text-align:justify;text-justify:inter-word}h1{font-size:24px}h2{font-size:20px}div.info{color:#555;font-size:14px;font-style:italic} |
@ -1,93 +0,0 @@ | |||
Copyright (c) 2013-2017, Jany Belluz ([email protected]) | |||
This Font Software is licensed under the SIL Open Font License, Version 1.1. | |||
This license is copied below, and is also available with a FAQ at: | |||
http://scripts.sil.org/OFL | |||
----------------------------------------------------------- | |||
SIL OPEN FONT LICENSE Version 1.1 - 26 February 2007 | |||
----------------------------------------------------------- | |||
PREAMBLE | |||
The goals of the Open Font License (OFL) are to stimulate worldwide | |||
development of collaborative font projects, to support the font creation | |||
efforts of academic and linguistic communities, and to provide a free and | |||
open framework in which fonts may be shared and improved in partnership | |||
with others. | |||
The OFL allows the licensed fonts to be used, studied, modified and | |||
redistributed freely as long as they are not sold by themselves. The | |||
fonts, including any derivative works, can be bundled, embedded, | |||
redistributed and/or sold with any software provided that any reserved | |||
names are not used by derivative works. The fonts and derivatives, | |||
however, cannot be released under any other type of license. The | |||
requirement for fonts to remain under this license does not apply | |||
to any document created using the fonts or their derivatives. | |||
DEFINITIONS | |||
"Font Software" refers to the set of files released by the Copyright | |||
Holder(s) under this license and clearly marked as such. This may | |||
include source files, build scripts and documentation. | |||
"Reserved Font Name" refers to any names specified as such after the | |||
copyright statement(s). | |||
"Original Version" refers to the collection of Font Software components as | |||
distributed by the Copyright Holder(s). | |||
"Modified Version" refers to any derivative made by adding to, deleting, | |||
or substituting -- in part or in whole -- any of the components of the | |||
Original Version, by changing formats or by porting the Font Software to a | |||
new environment. | |||
"Author" refers to any designer, engineer, programmer, technical | |||
writer or other person who contributed to the Font Software. | |||
PERMISSION & CONDITIONS | |||
Permission is hereby granted, free of charge, to any person obtaining | |||
a copy of the Font Software, to use, study, copy, merge, embed, modify, | |||
redistribute, and sell modified and unmodified copies of the Font | |||
Software, subject to the following conditions: | |||
1) Neither the Font Software nor any of its individual components, | |||
in Original or Modified Versions, may be sold by itself. | |||
2) Original or Modified Versions of the Font Software may be bundled, | |||
redistributed and/or sold with any software, provided that each copy | |||
contains the above copyright notice and this license. These can be | |||
included either as stand-alone text files, human-readable headers or | |||
in the appropriate machine-readable metadata fields within text or | |||
binary files as long as those fields can be easily viewed by the user. | |||
3) No Modified Version of the Font Software may use the Reserved Font | |||
Name(s) unless explicit written permission is granted by the corresponding | |||
Copyright Holder. This restriction only applies to the primary font name as | |||
presented to the users. | |||
4) The name(s) of the Copyright Holder(s) or the Author(s) of the Font | |||
Software shall not be used to promote, endorse or advertise any | |||
Modified Version, except to acknowledge the contribution(s) of the | |||
Copyright Holder(s) and the Author(s) or with their explicit written | |||
permission. | |||
5) The Font Software, modified or unmodified, in part or in whole, | |||
must be distributed entirely under this license, and must not be | |||
distributed under any other license. The requirement for fonts to | |||
remain under this license does not apply to any document created | |||
using the Font Software. | |||
TERMINATION | |||
This license becomes null and void if any of the above conditions are | |||
not met. | |||
DISCLAIMER | |||
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | |||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF | |||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT | |||
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE | |||
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, | |||
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL | |||
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | |||
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM | |||
OTHER DEALINGS IN THE FONT SOFTWARE. |
@ -0,0 +1,110 @@ | |||
Copyright 2015-2021, Renzhi Li (aka. Belleve Invis, [email protected]) | |||
This Font Software is licensed under the SIL Open Font License, Version 1.1. | |||
This license is copied below, and is also available with a FAQ at: | |||
http://scripts.sil.org/OFL | |||
-------------------------- | |||
SIL Open Font License v1.1 | |||
==================================================== | |||
Preamble | |||
---------- | |||
The goals of the Open Font License (OFL) are to stimulate worldwide | |||
development of collaborative font projects, to support the font creation | |||
efforts of academic and linguistic communities, and to provide a free and | |||
open framework in which fonts may be shared and improved in partnership | |||
with others. | |||
The OFL allows the licensed fonts to be used, studied, modified and | |||
redistributed freely as long as they are not sold by themselves. The | |||
fonts, including any derivative works, can be bundled, embedded, | |||
redistributed and/or sold with any software provided that any reserved | |||
names are not used by derivative works. The fonts and derivatives, | |||
however, cannot be released under any other type of license. The | |||
requirement for fonts to remain under this license does not apply | |||
to any document created using the fonts or their derivatives. | |||
Definitions | |||
------------- | |||
`"Font Software"` refers to the set of files released by the Copyright | |||
Holder(s) under this license and clearly marked as such. This may | |||
include source files, build scripts and documentation. | |||
`"Reserved Font Name"` refers to any names specified as such after the | |||
copyright statement(s). | |||
`"Original Version"` refers to the collection of Font Software components as | |||
distributed by the Copyright Holder(s). | |||
`"Modified Version"` refers to any derivative made by adding to, deleting, | |||
or substituting -- in part or in whole -- any of the components of the | |||
Original Version, by changing formats or by porting the Font Software to a | |||
new environment. | |||
`"Author"` refers to any designer, engineer, programmer, technical | |||
writer or other person who contributed to the Font Software. | |||
Permission & Conditions | |||
------------------------ | |||
Permission is hereby granted, free of charge, to any person obtaining | |||
a copy of the Font Software, to use, study, copy, merge, embed, modify, | |||
redistribute, and sell modified and unmodified copies of the Font | |||
Software, subject to the following conditions: | |||
1. Neither the Font Software nor any of its individual components, | |||
in Original or Modified Versions, may be sold by itself. | |||
2. Original or Modified Versions of the Font Software may be bundled, | |||
redistributed and/or sold with any software, provided that each copy | |||
contains the above copyright notice and this license. These can be | |||
included either as stand-alone text files, human-readable headers or | |||
in the appropriate machine-readable metadata fields within text or | |||
binary files as long as those fields can be easily viewed by the user. | |||
3. No Modified Version of the Font Software may use the Reserved Font | |||
Name(s) unless explicit written permission is granted by the corresponding | |||
Copyright Holder. This restriction only applies to the primary font name as | |||
presented to the users. | |||
4. The name(s) of the Copyright Holder(s) or the Author(s) of the Font | |||
Software shall not be used to promote, endorse or advertise any | |||
Modified Version, except to acknowledge the contribution(s) of the | |||
Copyright Holder(s) and the Author(s) or with their explicit written | |||
permission. | |||
5. The Font Software, modified or unmodified, in part or in whole, | |||
must be distributed entirely under this license, and must not be | |||
distributed under any other license. The requirement for fonts to | |||
remain under this license does not apply to any document created | |||
using the Font Software. | |||
Termination | |||
----------- | |||
This license becomes null and void if any of the above conditions are | |||
not met. | |||
DISCLAIMER | |||
THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, | |||
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF | |||
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT | |||
OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE | |||
COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, | |||
INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL | |||
DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING | |||
FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM | |||
OTHER DEALINGS IN THE FONT SOFTWARE. |
@ -0,0 +1,200 @@ | |||
Version 2.0, January 2004 | |||
http://www.apache.org/licenses/ | |||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION | |||
1. Definitions. | |||
"License" shall mean the terms and conditions for use, reproduction, | |||
and distribution as defined by Sections 1 through 9 of this document. | |||
"Licensor" shall mean the copyright owner or entity authorized by | |||
the copyright owner that is granting the License. | |||
"Legal Entity" shall mean the union of the acting entity and all | |||
other entities that control, are controlled by, or are under common | |||
control with that entity. For the purposes of this definition, | |||
"control" means (i) the power, direct or indirect, to cause the | |||
direction or management of such entity, whether by contract or | |||
otherwise, or (ii) ownership of fifty percent (50%) or more of the | |||
outstanding shares, or (iii) beneficial ownership of such entity. | |||
"You" (or "Your") shall mean an individual or Legal Entity | |||
exercising permissions granted by this License. | |||
"Source" form shall mean the preferred form for making modifications, | |||
including but not limited to software source code, documentation | |||
source, and configuration files. | |||
"Object" form shall mean any form resulting from mechanical | |||
transformation or translation of a Source form, including but | |||
not limited to compiled object code, generated documentation, | |||
and conversions to other media types. | |||
"Work" shall mean the work of authorship, whether in Source or | |||
Object form, made available under the License, as indicated by a | |||
copyright notice that is included in or attached to the work | |||
(an example is provided in the Appendix below). | |||
"Derivative Works" shall mean any work, whether in Source or Object | |||
form, that is based on (or derived from) the Work and for which the | |||
editorial revisions, annotations, elaborations, or other modifications | |||
represent, as a whole, an original work of authorship. For the purposes | |||
of this License, Derivative Works shall not include works that remain | |||
separable from, or merely link (or bind by name) to the interfaces of, | |||
the Work and Derivative Works thereof. | |||
"Contribution" shall mean any work of authorship, including | |||
the original version of the Work and any modifications or additions | |||
to that Work or Derivative Works thereof, that is intentionally | |||
submitted to Licensor for inclusion in the Work by the copyright owner | |||
or by an individual or Legal Entity authorized to submit on behalf of | |||
the copyright owner. For the purposes of this definition, "submitted" | |||
means any form of electronic, verbal, or written communication sent | |||
to the Licensor or its representatives, including but not limited to | |||
communication on electronic mailing lists, source code control systems, | |||
and issue tracking systems that are managed by, or on behalf of, the | |||
Licensor for the purpose of discussing and improving the Work, but | |||
excluding communication that is conspicuously marked or otherwise | |||
designated in writing by the copyright owner as "Not a Contribution." | |||
"Contributor" shall mean Licensor and any individual or Legal Entity | |||
on behalf of whom a Contribution has been received by Licensor and | |||
subsequently incorporated within the Work. | |||
2. Grant of Copyright License. Subject to the terms and conditions of | |||
this License, each Contributor hereby grants to You a perpetual, | |||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable | |||
copyright license to reproduce, prepare Derivative Works of, | |||
publicly display, publicly perform, sublicense, and distribute the | |||
Work and such Derivative Works in Source or Object form. | |||
3. Grant of Patent License. Subject to the terms and conditions of | |||
this License, each Contributor hereby grants to You a perpetual, | |||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable | |||
(except as stated in this section) patent license to make, have made, | |||
use, offer to sell, sell, import, and otherwise transfer the Work, | |||
where such license applies only to those patent claims licensable | |||
by such Contributor that are necessarily infringed by their | |||
Contribution(s) alone or by combination of their Contribution(s) | |||
with the Work to which such Contribution(s) was submitted. If You | |||
institute patent litigation against any entity (including a | |||
cross-claim or counterclaim in a lawsuit) alleging that the Work | |||
or a Contribution incorporated within the Work constitutes direct | |||
or contributory patent infringement, then any patent licenses | |||
granted to You under this License for that Work shall terminate | |||
as of the date such litigation is filed. | |||
4. Redistribution. You may reproduce and distribute copies of the | |||
Work or Derivative Works thereof in any medium, with or without | |||
modifications, and in Source or Object form, provided that You | |||
meet the following conditions: | |||
(a) You must give any other recipients of the Work or | |||
Derivative Works a copy of this License; and | |||
(b) You must cause any modified files to carry prominent notices | |||
stating that You changed the files; and | |||
(c) You must retain, in the Source form of any Derivative Works | |||
that You distribute, all copyright, patent, trademark, and | |||
attribution notices from the Source form of the Work, | |||
excluding those notices that do not pertain to any part of | |||
the Derivative Works; and | |||
(d) If the Work includes a "NOTICE" text file as part of its | |||
distribution, then any Derivative Works that You distribute must | |||
include a readable copy of the attribution notices contained | |||
within such NOTICE file, excluding those notices that do not | |||
pertain to any part of the Derivative Works, in at least one | |||
of the following places: within a NOTICE text file distributed | |||
as part of the Derivative Works; within the Source form or | |||
documentation, if provided along with the Derivative Works; or, | |||
within a display generated by the Derivative Works, if and | |||
wherever such third-party notices normally appear. The contents | |||
of the NOTICE file are for informational purposes only and | |||
do not modify the License. You may add Your own attribution | |||
notices within Derivative Works that You distribute, alongside | |||
or as an addendum to the NOTICE text from the Work, provided | |||
that such additional attribution notices cannot be construed | |||
as modifying the License. | |||
You may add Your own copyright statement to Your modifications and | |||
may provide additional or different license terms and conditions | |||
for use, reproduction, or distribution of Your modifications, or | |||
for any such Derivative Works as a whole, provided Your use, | |||
reproduction, and distribution of the Work otherwise complies with | |||
the conditions stated in this License. | |||
5. Submission of Contributions. Unless You explicitly state otherwise, | |||
any Contribution intentionally submitted for inclusion in the Work | |||
by You to the Licensor shall be under the terms and conditions of | |||
this License, without any additional terms or conditions. | |||
Notwithstanding the above, nothing herein shall supersede or modify | |||
the terms of any separate license agreement you may have executed | |||
with Licensor regarding such Contributions. | |||
6. Trademarks. This License does not grant permission to use the trade | |||
names, trademarks, service marks, or product names of the Licensor, | |||
except as required for reasonable and customary use in describing the | |||
origin of the Work and reproducing the content of the NOTICE file. | |||
7. Disclaimer of Warranty. Unless required by applicable law or | |||
agreed to in writing, Licensor provides the Work (and each | |||
Contributor provides its Contributions) on an "AS IS" BASIS, | |||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or | |||
implied, including, without limitation, any warranties or conditions | |||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A | |||
PARTICULAR PURPOSE. You are solely responsible for determining the | |||
appropriateness of using or redistributing the Work and assume any | |||
risks associated with Your exercise of permissions under this License. | |||
8. Limitation of Liability. In no event and under no legal theory, | |||
whether in tort (including negligence), contract, or otherwise, | |||
unless required by applicable law (such as deliberate and grossly | |||
negligent acts) or agreed to in writing, shall any Contributor be | |||
liable to You for damages, including any direct, indirect, special, | |||
incidental, or consequential damages of any character arising as a | |||
result of this License or out of the use or inability to use the | |||
Work (including but not limited to damages for loss of goodwill, | |||
work stoppage, computer failure or malfunction, or any and all | |||
other commercial damages or losses), even if such Contributor | |||
has been advised of the possibility of such damages. | |||
9. Accepting Warranty or Additional Liability. While redistributing | |||
the Work or Derivative Works thereof, You may choose to offer, | |||
and charge a fee for, acceptance of support, warranty, indemnity, | |||
or other liability obligations and/or rights consistent with this | |||
License. However, in accepting such obligations, You may act only | |||
on Your own behalf and on Your sole responsibility, not on behalf | |||
of any other Contributor, and only if You agree to indemnify, | |||
defend, and hold each Contributor harmless for any liability | |||
incurred by, or claims asserted against, such Contributor by reason | |||
of your accepting any such warranty or additional liability. | |||
END OF TERMS AND CONDITIONS | |||
APPENDIX: How to apply the Apache License to your work. | |||
To apply the Apache License to your work, attach the following | |||
boilerplate notice, with the fields enclosed by brackets "[]" | |||
replaced with your own identifying information. (Don't include | |||
the brackets!) The text should be enclosed in the appropriate | |||
comment syntax for the file format. We also recommend that a | |||
file or class name and description of purpose be included on the | |||
same "printed page" as the copyright notice for easier | |||
identification within third-party archives. | |||
Copyright [yyyy] [name of copyright owner] | |||
Licensed under the Apache License, Version 2.0 (the "License"); | |||
you may not use this file except in compliance with the License. | |||
You may obtain a copy of the License at | |||
http://www.apache.org/licenses/LICENSE-2.0 | |||
Unless required by applicable law or agreed to in writing, software | |||
distributed under the License is distributed on an "AS IS" BASIS, | |||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
See the License for the specific language governing permissions and | |||
limitations under the License. |
@ -0,0 +1,31 @@ | |||
<?xml version="1.0" encoding="UTF-8" standalone="no"?> | |||
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" | |||
"http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd"> | |||
<!-- Generated by graphviz version 2.48.0 (0) | |||
--> | |||
<!-- Title: G Pages: 1 --> | |||
<svg width="112pt" height="116pt" | |||
viewBox="0.00 0.00 111.99 116.00" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink"> | |||
<g id="graph0" class="graph" transform="scale(1 1) rotate(0) translate(4 112)"> | |||
<title>G</title> | |||
<polygon fill="white" stroke="transparent" points="-4,4 -4,-112 107.99,-112 107.99,4 -4,4"/> | |||
<!-- stack --> | |||
<g id="node1" class="node"> | |||
<title>stack</title> | |||
<ellipse fill="none" stroke="black" cx="52" cy="-90" rx="35.19" ry="18"/> | |||
<text text-anchor="middle" x="52" y="-86.3" font-family="Times,serif" font-size="14.00">stack</text> | |||
</g> | |||
<!-- 3628800 --> | |||
<g id="node2" class="node"> | |||
<title>3628800</title> | |||
<ellipse fill="none" stroke="black" cx="52" cy="-18" rx="51.99" ry="18"/> | |||
<text text-anchor="middle" x="52" y="-14.3" font-family="Times,serif" font-size="14.00">3628800</text> | |||
</g> | |||
<!-- stack->3628800 --> | |||
<g id="edge1" class="edge"> | |||
<title>stack->3628800</title> | |||
<path fill="none" stroke="black" d="M52,-71.7C52,-63.98 52,-54.71 52,-46.11"/> | |||
<polygon fill="black" stroke="black" points="55.5,-46.1 52,-36.1 48.5,-46.1 55.5,-46.1"/> | |||
</g> | |||
</g> | |||
</svg> |
@ -0,0 +1,10 @@ | |||
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" viewBox="-13 -14 26 26"> | |||
<path style="fill:#5fbcd3" d="M 9.51057,-3.09017 -1.83697e-15,-10 -9.51057,-3.09017 -5.87785,8.09017 5.87785,8.09017 9.51057,-3.09017 z" /> | |||
<path style="fill:#216778" d="M 10.7181,-2.47446 12.6668,-2.92436 8.52877,9.81122 7.21665,8.30181 z" /> | |||
<path style="fill:#216778" d="M 0.958713,-10.9581 1.13302,-12.9505 11.9666,-5.0795 10.1256,-4.29804 z" /> | |||
<path style="fill:#216778" d="M -10.1256,-4.29804 -11.9666,-5.0795 -1.13302,-12.9505 -0.958713,-10.9581 z" /> | |||
<path style="fill:#216778" d="M -7.21665,8.30181 -8.52877,9.81122 -12.6668,-2.92436 -10.7181,-2.47446 z" /> | |||
<path style="fill:#216778" d="M 5.66542,9.42884 6.69549,11.1432 -6.69549,11.1432 -5.66542,9.42884 z" /> | |||
<path style="fill:#216778" d="M 10.7181,-2.47446 12.6668,-2.92436 8.52877,9.81122 7.21665,8.30181 z" /> | |||
<path style="fill:#ffffff" d="M 0,4.5424805 2.0141602,-0.89941406 H -2.0068359 Z M -0.83496094,6 -5.0097656,-4.9350586 h 1.5600586 l 0.9960937,2.8051758 H 2.4755859 L 3.4716797,-4.9350586 H 5.0097656 L 0.84228516,6 Z" /> | |||
</svg> |
@ -0,0 +1,31 @@ | |||
<?xml version="1.0" encoding="utf-8"?> | |||
<svg version="1.1" id="main_outline" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" x="0px" | |||
y="0px" viewBox="-40 -40 720 720" style="enable-background:new 0 0 640 640;" xml:space="preserve"> | |||
<g> | |||
<path id="teabag" style="fill:#609926" d="M395.9,484.2l-126.9-61c-12.5-6-17.9-21.2-11.8-33.8l61-126.9c6-12.5,21.2-17.9,33.8-11.8 | |||
c17.2,8.3,27.1,13,27.1,13l-0.1-109.2l16.7-0.1l0.1,117.1c0,0,57.4,24.2,83.1,40.1c3.7,2.3,10.2,6.8,12.9,14.4 | |||
c2.1,6.1,2,13.1-1,19.3l-61,126.9C423.6,484.9,408.4,490.3,395.9,484.2z"/> | |||
<g> | |||
<g> | |||
<path style="fill:#fff" d="M622.7,149.8c-4.1-4.1-9.6-4-9.6-4s-117.2,6.6-177.9,8c-13.3,0.3-26.5,0.6-39.6,0.7c0,39.1,0,78.2,0,117.2 | |||
c-5.5-2.6-11.1-5.3-16.6-7.9c0-36.4-0.1-109.2-0.1-109.2c-29,0.4-89.2-2.2-89.2-2.2s-141.4-7.1-156.8-8.5 | |||
c-9.8-0.6-22.5-2.1-39,1.5c-8.7,1.8-33.5,7.4-53.8,26.9C-4.9,212.4,6.6,276.2,8,285.8c1.7,11.7,6.9,44.2,31.7,72.5 | |||
c45.8,56.1,144.4,54.8,144.4,54.8s12.1,28.9,30.6,55.5c25,33.1,50.7,58.9,75.7,62c63,0,188.9-0.1,188.9-0.1s12,0.1,28.3-10.3 | |||
c14-8.5,26.5-23.4,26.5-23.4s12.9-13.8,30.9-45.3c5.5-9.7,10.1-19.1,14.1-28c0,0,55.2-117.1,55.2-231.1 | |||
C633.2,157.9,624.7,151.8,622.7,149.8z M125.6,353.9c-25.9-8.5-36.9-18.7-36.9-18.7S69.6,321.8,60,295.4 | |||
c-16.5-44.2-1.4-71.2-1.4-71.2s8.4-22.5,38.5-30c13.8-3.7,31-3.1,31-3.1s7.1,59.4,15.7,94.2c7.2,29.2,24.8,77.7,24.8,77.7 | |||
S142.5,359.9,125.6,353.9z M425.9,461.5c0,0-6.1,14.5-19.6,15.4c-5.8,0.4-10.3-1.2-10.3-1.2s-0.3-0.1-5.3-2.1l-112.9-55 | |||
c0,0-10.9-5.7-12.8-15.6c-2.2-8.1,2.7-18.1,2.7-18.1L322,273c0,0,4.8-9.7,12.2-13c0.6-0.3,2.3-1,4.5-1.5c8.1-2.1,18,2.8,18,2.8 | |||
l110.7,53.7c0,0,12.6,5.7,15.3,16.2c1.9,7.4-0.5,14-1.8,17.2C474.6,363.8,425.9,461.5,425.9,461.5z"/> | |||
<path style="fill:#fff" d="M326.8,380.1c-8.2,0.1-15.4,5.8-17.3,13.8c-1.9,8,2,16.3,9.1,20c7.7,4,17.5,1.8,22.7-5.4 | |||
c5.1-7.1,4.3-16.9-1.8-23.1l24-49.1c1.5,0.1,3.7,0.2,6.2-0.5c4.1-0.9,7.1-3.6,7.1-3.6c4.2,1.8,8.6,3.8,13.2,6.1 | |||
c4.8,2.4,9.3,4.9,13.4,7.3c0.9,0.5,1.8,1.1,2.8,1.9c1.6,1.3,3.4,3.1,4.7,5.5c1.9,5.5-1.9,14.9-1.9,14.9 | |||
c-2.3,7.6-18.4,40.6-18.4,40.6c-8.1-0.2-15.3,5-17.7,12.5c-2.6,8.1,1.1,17.3,8.9,21.3c7.8,4,17.4,1.7,22.5-5.3 | |||
c5-6.8,4.6-16.3-1.1-22.6c1.9-3.7,3.7-7.4,5.6-11.3c5-10.4,13.5-30.4,13.5-30.4c0.9-1.7,5.7-10.3,2.7-21.3 | |||
c-2.5-11.4-12.6-16.7-12.6-16.7c-12.2-7.9-29.2-15.2-29.2-15.2s0-4.1-1.1-7.1c-1.1-3.1-2.8-5.1-3.9-6.3c4.7-9.7,9.4-19.3,14.1-29 | |||
c-4.1-2-8.1-4-12.2-6.1c-4.8,9.8-9.7,19.7-14.5,29.5c-6.7-0.1-12.9,3.5-16.1,9.4c-3.4,6.3-2.7,14.1,1.9,19.8 | |||
C343.2,346.5,335,363.3,326.8,380.1z"/> | |||
</g> | |||
</g> | |||
</g> | |||
</svg> |
@ -0,0 +1,3 @@ | |||
<svg viewBox="0 0 32.58 31.77" xmlns="http://www.w3.org/2000/svg" width="1em" height="1em"> | |||
<path d="M7.953 6.754c-.177.008-.264.035-.264.035a5.94 5.94 0 0 0-.16 4.31 6.3 6.3 0 0 0-1.67 4.372c0 6.19 3.8 7.59 7.42 8a3.46 3.46 0 0 0-1 2.18 3.48 3.48 0 0 1-4.74-1.362 3.43 3.43 0 0 0-2.488-1.68s-1.591 0-.121 1A4.36 4.36 0 0 1 6.74 26s.96 3.18 5.51 2.19v2.771c0 .376-.221.813-.826.816l9.69-.048.019-.026c-.563-.006-.793-.346-.793-.793v-4.47a3.85 3.85 0 0 0-1.11-3c3.63-.37 7.44-1.74 7.44-8A6.3 6.3 0 0 0 25 11.07a5.91 5.91 0 0 0-.2-4.28s-1.36-.439-4.47 1.671a15.41 15.41 0 0 0-8.16 0C9.837 6.88 8.484 6.73 7.953 6.754zm3.445 25.02.004.003h.02l-.024-.002z" fill="#fff" /> | |||
</svg> |
@ -0,0 +1,46 @@ | |||
<?xml version="1.0" encoding="UTF-8" standalone="no"?> | |||
<svg | |||
width="600pt" | |||
height="600pt" | |||
viewBox="0 0 600 600" | |||
version="1.2" | |||
id="svg7" | |||
sodipodi:docname="kofi.svg" | |||
inkscape:version="1.1.1 (3bf5ae0d25, 2021-09-20, custom)" | |||
xmlns:inkscape="http://www.inkscape.org/namespaces/inkscape" | |||
xmlns:sodipodi="http://sodipodi.sourceforge.net/DTD/sodipodi-0.dtd" | |||
xmlns="http://www.w3.org/2000/svg" | |||
xmlns:svg="http://www.w3.org/2000/svg"> | |||
<defs | |||
id="defs11" /> | |||
<sodipodi:namedview | |||
id="namedview9" | |||
pagecolor="#ffffff" | |||
bordercolor="#666666" | |||
borderopacity="1.0" | |||
inkscape:pageshadow="2" | |||
inkscape:pageopacity="0.0" | |||
inkscape:pagecheckerboard="0" | |||
inkscape:document-units="pt" | |||
showgrid="false" | |||
inkscape:zoom="0.78" | |||
inkscape:cx="399.35897" | |||
inkscape:cy="399.35897" | |||
inkscape:window-width="1920" | |||
inkscape:window-height="1080" | |||
inkscape:window-x="0" | |||
inkscape:window-y="0" | |||
inkscape:window-maximized="1" | |||
inkscape:current-layer="surface477" /> | |||
<g | |||
id="surface477"> | |||
<path | |||
style=" stroke:none;fill-rule:nonzero;fill:rgb(100%,100%,100%);fill-opacity:1;" | |||
d="M 548.425781 254.132812 C 542.109375 220.757812 524.140625 199.96875 505.730469 187.113281 C 486.6875 173.820312 463.882812 166.988281 440.660156 166.988281 L 111.074219 166.988281 C 99.605469 166.988281 95.214844 178.183594 95.171875 183.789062 C 95.167969 184.519531 95.195312 187.445312 95.195312 187.445312 C 95.195312 187.445312 94.652344 333.257812 95.675781 411.140625 C 98.785156 457.117188 144.851562 457.101562 144.851562 457.101562 C 144.851562 457.101562 295.257812 456.660156 367.386719 456.214844 C 370.769531 456.191406 374.148438 455.828125 377.429688 455.007812 C 418.5 444.730469 422.75 406.578125 422.308594 385.308594 C 504.859375 389.894531 563.101562 331.644531 548.425781 254.132812 M 455.695312 329.808594 C 438.09375 332.007812 423.789062 330.355469 423.789062 330.355469 L 423.789062 222.5625 L 445.449219 222.5625 C 459.765625 222.5625 473.578125 228.523438 482.917969 239.371094 C 489.496094 247.007812 494.757812 257.757812 494.757812 272.609375 C 494.757812 308.90625 476.050781 323.207031 455.695312 329.808594 " | |||
id="path2" /> | |||
<path | |||
style=" stroke:none;fill-rule:nonzero;fill:rgb(94.343567%,26.055908%,33.364868%);fill-opacity:1;" | |||
d="M 256.613281 396.632812 C 260.199219 398.4375 262.488281 396.195312 262.488281 396.195312 C 262.488281 396.195312 314.945312 348.316406 338.578125 320.742188 C 359.597656 296.078125 360.96875 254.511719 324.871094 238.976562 C 288.773438 223.445312 259.074219 257.25 259.074219 257.25 C 233.320312 228.925781 194.34375 230.359375 176.3125 249.527344 C 158.285156 268.699219 164.582031 301.601562 178.03125 319.910156 C 190.65625 337.101562 246.148438 386.5625 254.558594 394.925781 C 254.558594 394.925781 255.171875 395.566406 256.613281 396.632812 " | |||
id="path4" /> | |||
</g> | |||
</svg> |
@ -0,0 +1,3 @@ | |||
<svg viewBox="0 0 400 400" xmlns="http://www.w3.org/2000/svg" width="1em" height="1em"> | |||
<path fill="#fff" d="M163.4 305.5c88.7 0 137.2-73.5 137.2-137.2 0-2.1 0-4.2-.1-6.2 9.4-6.8 17.6-15.3 24.1-25-8.6 3.8-17.9 6.4-27.7 7.6 10-6 17.6-15.4 21.2-26.7-9.3 5.5-19.6 9.5-30.6 11.7-8.8-9.4-21.3-15.2-35.2-15.2-26.6 0-48.2 21.6-48.2 48.2 0 3.8.4 7.5 1.3 11-40.1-2-75.6-21.2-99.4-50.4-4.1 7.1-6.5 15.4-6.5 24.2 0 16.7 8.5 31.5 21.5 40.1-7.9-.2-15.3-2.4-21.8-6v.6c0 23.4 16.6 42.8 38.7 47.3-4 1.1-8.3 1.7-12.7 1.7-3.1 0-6.1-.3-9.1-.9 6.1 19.2 23.9 33.1 45 33.5-16.5 12.9-37.3 20.6-59.9 20.6-3.9 0-7.7-.2-11.5-.7 21.1 13.8 46.5 21.8 73.7 21.8" /> | |||
</svg> |