commit 21bc57abd496dcbc99cf859249e0b949547be6b8 Author: Amélia Liao Date: Fri Jan 21 20:41:59 2022 -0300 Reinit diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..ae50967 --- /dev/null +++ b/.gitignore @@ -0,0 +1,15 @@ +/.site +/.store +/.stack-work +/.vscode + +/node_modules + +/uni +/portfolio.md +/fonts +/css/fonts + +/.mailmap + +.katex_cache \ No newline at end of file diff --git a/blag.cabal b/blag.cabal new file mode 100644 index 0000000..5ae9ec3 --- /dev/null +++ b/blag.cabal @@ -0,0 +1,27 @@ +name: blag +version: 0.1.0.0 +build-type: Simple +cabal-version: >= 1.10 + +executable site + main-is: site.hs + build-depends: base + , text + , hsass + , aeson + , hakyll + , pandoc + , binary + , process + , deepseq + , hashable + , directory + , containers + , bytestring + , uri-encode + , hakyll-sass + , skylighting + , pandoc-types + , unordered-containers + ghc-options: -threaded + default-language: Haskell2010 \ No newline at end of file diff --git a/css/agda.scss b/css/agda.scss new file mode 100644 index 0000000..03ae77b --- /dev/null +++ b/css/agda.scss @@ -0,0 +1,98 @@ +$code-bg: hsl(230,1%,98%); +$code-fg: #ABB2BF; +$code-red: #D65122; +$code-red-br: #AE3B36; +$code-green: #88B966; +$code-yellow: #DEB468; +$code-orange: #C58853; +$code-blue: #519DEB; +$code-pink: #C678DD; +$code-cyan: #48A8B5; +$code-white: #ABB2BF; +$code-grey: #7F848E; + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 400; + font-stretch: normal; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-regular.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-regular.ttf') format('truetype'); +} + +body { + margin: 0; + background-color:$code-bg; + color:#ABB2BF; +} + +html { + background-color: $code-bg; + color:#ABB2BF; +} + +pre.Agda { + margin: 0; + padding: 1em; + background-color: $code-bg; + color: $code-fg; +} + +@keyframes highlight { + 0% { + background-color: #F5DEB3; + } + 100% { + background-color: $code-bg; + } +} 
+ +/* Aspects. */ +.Agda { + .Comment { color: $code-grey; } + .Background { background-color: $code-bg; } + .Markup { color: $code-fg; } + .Keyword { color: $code-orange; } + .String { color: $code-red; } + .Number { color: $code-pink; } + .Symbol { color: $code-fg; } + .PrimitiveType { color: $code-blue; } + .Pragma { color: $code-fg; } + +/* NameKinds. */ + .Bound { color: $code-fg; } + .Generalizable { color: $code-fg; } + .InductiveConstructor { color: $code-green; } + .CoinductiveConstructor { color: $code-green; } + .Datatype { color: $code-blue; } + .Field { color: #F570B7; } + .Function { color: $code-blue; } + .Module { color: $code-pink; } + .Postulate { color: $code-blue; } + .Primitive { color: $code-blue; } + .Record { color: $code-blue; } + +/* OtherAspects. */ + .UnsolvedMeta { color: $code-fg; background: yellow } + .UnsolvedConstraint { color: $code-fg; background: yellow } + .TerminationProblem { color: $code-fg; background: #FFA07A } + .IncompletePattern { color: $code-fg; background: #F5DEB3 } + .Error { color: red; text-decoration: underline } + .TypeChecks { color: $code-fg; background: #ADD8E6 } + .Deadcode { color: $code-fg; background: #808080 } + .ShadowingInTelescope { color: $code-fg; background: #808080 } + + /* Standard attributes. 
*/ + a { text-decoration: none } + a[href]:hover { + text-decoration: 2px #B4EEB4 underline dotted; + } + a[href]:target { + animation: highlight 2.5s; + } + + background-color: #282C34; + font-family: 'Iosevka', 'Fantasque Sans Mono', 'Roboto Mono', monospace; + font-weight: 400; + font-size: 16pt; +} \ No newline at end of file diff --git a/css/default.scss b/css/default.scss new file mode 100644 index 0000000..ef0e636 --- /dev/null +++ b/css/default.scss @@ -0,0 +1,528 @@ +@import "vars.scss"; + +@mixin center-that-bitch { + display: flex; + flex-direction: column; + align-items: center; +} + +html { + min-height: 100%; + height: 100%; + max-width: 100%; + margin: 0; + + overflow-x: clip; +} + +body { + font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, Oxygen, Ubuntu, Cantarell, 'Open Sans', 'Helvetica Neue', sans-serif; + + width: 100%; + + @include center-that-bitch; + + margin: 0; + + color: $bluegray-900; + font-size: $font-size; + + overflow-x: clip; +} + +body > header { + background-color: $purple-600; + display: flex; + height: $nav-height; + width: 100%; + z-index: 999; + + position: fixed; + + justify-content: space-between; + + align-items: center; + + padding-left: 1em; + padding-right: 1em; + + border-bottom: 3px solid $purple-700; + box-sizing: border-box; + + div, nav > a { + height: $nav-height; + + padding-left: 0.3em; + padding-right: 0.3em; + + display: flex; + align-items: center; + } + + div:hover, nav > a:hover { + background-color: $purple-500; + transition: background-color 0.3s ease-in-out; + } + + a { + color: white; + font-size: $font-size * 1.2; + text-decoration: none; + } + + nav { + display: flex; + align-items: center; + gap: 0.5em; + } +} + +@mixin left-bordered-block($color) { + padding-left: 1em; + padding-top: 0.2em; + padding-bottom: 0.2em; + + border-left: 5px dashed $color; +} + +@mixin material { + padding: 1em; + + margin-top: 1em; + margin-bottom: 1em; + + box-shadow: 2px 2px 6px black; + + 
border-radius: 10px; +} + +main { + max-width: 100ch; + width: 100%; + margin: 0px auto 0px auto; + flex: 1 0 auto; + + padding: 2ch; + padding-top: $nav-height; + + box-sizing: border-box; + + div#title h2 { + display: none; + } + + div#post-toc-container { + aside#toc { + display: none; + } + + article { + grid-column: 2; + width: 100%; + line-height: 1.5; + } + + } + + div#post-info { + font-style: italic; + line-height: 1.2; + + @include left-bordered-block($bluegray-500); + } +} + +div.warning { + @include material; + background-color: $red-200; +} + +figure.wraparound { + float: right; + width: auto; + + margin-left: 2em; +} + +figure { + overflow-x: auto; + overflow-y: clip; + width: 100%; + margin: auto; + + @include center-that-bitch; + + figcaption { + margin-top: 0.3em; + display: inline-block; + text-align: center; + } + + p { + margin: 0; + } +} + +ol, ul { + padding-left: 1.2em; + li { + margin-top: 5px; + margin-bottom: 5px; + p { + margin-top: 5px; + margin-bottom: 5px; + } + } +} + +.katex-display { + > span.katex { + white-space: normal; + } +} + +div.mathpar { + display: flex; + flex-flow: row wrap; + justify-content: center; + align-items: center; + + + gap: 1em; + + > figure { + width: auto; + max-width: 33%; + } +} + +div.columns { + blockquote, details.blockquote { + padding-right: 1em; + padding-left: 1em; + padding-top: 0.2em; + padding-bottom: 0.2em; + + border: 0; + } +} + +code, pre, .sourceCode { + font-size: $font-size; + font-family: 'Iosevka', 'Fantasque Sans Mono', Consolas, "Andale Mono WT", "Andale Mono", "Lucida Console", "Lucida Sans Typewriter", "DejaVu Sans Mono", "Bitstream Vera Sans Mono", "Liberation Mono", "Nimbus Mono L", Monaco, "Courier New", Courier, monospace; + font-weight: 500; +} + +div.sourceCode, pre { + background-color: $code-bg; + color: $code-fg; + flex-grow: 0; + + @include material; + + overflow-x: auto; + line-height: 1.2; + + code { + display: block; + } + + > pre { + padding: unset; + margin-top: unset; 
+ margin-bottom: unset; + box-shadow: unset; + + margin: 0; + + overflow-y: clip; + } +} + +p > code { + white-space: nowrap; +} + +blockquote, details.blockquote { + @include material; + + background-color: $purple-100; + + margin-left: 0; + margin-right: 0; + + h2 { + margin-top: 0; + } +} + +table { + width: 70%; + margin: auto; + border-collapse: collapse; + + td, th { + text-align: center; + padding: 0px 1em 0px 1em; + border: 2px solid $purple-400; + } +} + +ul#post-list { + list-style-type: none; + display: flex; + flex-direction: column; + + .post-list-item { + @include left-bordered-block($yellow-500); + @include material; + + margin: 0; + + background-color: $yellow-50; + + .post-list-header { + margin-top: 0.2em; + + display: flex; + justify-content: space-between; + line-height: 14pt; + + font-style: italic; + font-size: 10pt; + + a { + font-size: 14pt; + font-style: normal; + color: $bluegray-800; + } + } + } +} + +div.contact-list { + display: flex; + justify-content: space-evenly; + align-items: stretch; + gap: 3em; + + div.contact-card { + background-color: $purple-200; + @include material; + + width: 33%; + max-width: 33%; + flex-grow: 1; + + p { + margin: 0; + } + + div.contact-header { + // I really hate Pandoc sometimes + display: flex; + align-items: center; + gap: 1em; + + margin-bottom: 10px; + + img { + height: 48px; + clip-path: url(#squircle); + } + + span.username { + font-size: 16pt; + } + } + } +} + +@media only screen and (max-width: 450px) { + body > header { + div#logo { + width: 100%; + display: flex; + flex-direction: row; + justify-content: center; + } + nav { + display: none; + } + } +} + +@media only screen and (min-width: 1500px) { + .narrow-only { + display: none !important; + } + + main { + max-width: 100%; + > div#title { + font-size: 15pt; + h1, h2 { + margin: 0; + } + + h2 { + font-style: italic; + font-weight: normal; + display: block; + z-index: 1; + } + + margin-top: 0.5em; + margin-bottom: 1em; + @include 
center-that-bitch; + } + + div#post-toc-container { + display: grid; + grid-template-columns: 0.5fr 2fr 0.5fr; + gap: 1em; + + aside#toc { + display: block !important; + + h3 { @include center-that-bitch; } + + div#toc-container { + overflow-x: hidden; + width: 100%; + position: sticky; + top: 2em; + + overflow-y: auto; + max-height: 90vh; + + bottom: 2em; + + ul { + border-left: 2px solid $bluegray-400; + list-style-type: none; + padding-left: 1em; + + a { + text-decoration: none; + } + + a:hover { + text-decoration: underline; + } + } + } + + } + + article { + max-width: 100ch; + margin-top: -100px; + margin: auto; + } + } + + div.columns { + display: grid; + grid-template-columns: 1fr 1fr; + gap: 1em; + + } + } +} + +#index { + padding-top: 4em; + + a.ico-left { + img { + clip-path: url(#squircle); + width: 96px; + height: 96px; + } + float: left; + margin-right: 1em; + width: 96px; + height: 96px; + } + + a.ico-right { + img { + clip-path: url(#squircle); + width: 96px; + height: 96px; + } + float: right; + margin-left: 1em; + width: 96px; + height: 96px; + } + + div#social { + display: flex; + flex-direction: row; + justify-content: center; + flex-wrap: wrap; + width: 100%; + gap: 8px; + + img { + width: 48px; + height: 48px; + clip-path: url(#squircle); + transition: width 0.25s, height 0.25s; + &:hover { + width: 54px; + height: 54px; + } + } + + a { + filter: drop-shadow(2px 2px 3px rgba(50, 50, 0, 0.5)); + height: 54px; + } + } + + display: flex; + flex-direction: column; +} + +@media only screen and (min-width: 1500px) { + #index { + display: grid; + grid-template-columns: 0.20fr 0.75fr 0.20fr 1fr 0.20fr; + } +} + + +details { + margin-top: 1em; + margin-bottom: 1em; +} + +// Styles for code +code.kw, span.kw { color: $code-pink; } /* Keyword */ +code.dt, span.dt { color: $code-blue; } /* DataType */ +code.dv, span.dv { color: $code-orange; } /* DecVal */ +code.bn, span.bn { color: $code-orange; } /* BaseN */ +code.fl, span.fl { color: $code-orange; } /* 
Float */ +code.ch, span.ch { color: $code-green; } /* Char */ +code.st, span.st { color: $code-green; } /* String */ +code.co, span.co { color: $code-grey; } /* Comment */ +code.ot, span.ot { color: $code-green; } /* Other */ +code.al, span.al { color: #ff0000; } /* Alert */ +code.fu, span.fu { color: $code-fg; } /* Function */ +code.er, span.er { color: #ff0000; } /* Error */ +code.wa, span.wa { color: #60a0b0; } /* Warning */ +code.cn, span.cn { color: $code-orange; } /* Constant */ +code.sc, span.sc { color: $code-yellow; } /* SpecialChar */ +code.vs, span.vs { color: $code-blue; } /* VerbatimString */ +code.ss, span.ss { color: $code-green; } /* SpecialString */ +code.va, span.va { color: $code-fg; } /* Variable */ +code.cf, span.cf { color: $code-pink; } /* ControlFlow */ +code.op, span.op { color: $code-green; } /* Operator */ +code.pp, span.pp { color: $code-orange; } /* Preprocessor */ +code.at, span.at { color: $code-green; } /* Attribute */ +code.do, span.do { color: $code-red; } /* Documentation */ +code.an, span.an { color: $code-red; } /* Annotation */ +code.cv, span.cv { color: $code-red; } /* CommentVar */ \ No newline at end of file diff --git a/css/fonts.scss b/css/fonts.scss new file mode 100644 index 0000000..14a3390 --- /dev/null +++ b/css/fonts.scss @@ -0,0 +1,419 @@ +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 400; + font-stretch: normal; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-regular.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-regular.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 400; + font-stretch: expanded; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-extended.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extended.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 400; + font-stretch: normal; + font-style: oblique; + src: 
url('/static/woff2/iosevk-abbie-oblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-oblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 400; + font-stretch: normal; + src: url('/static/woff2/iosevk-abbie-oblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-oblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 400; + font-stretch: expanded; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-extendedoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 400; + font-stretch: expanded; + src: url('/static/woff2/iosevk-abbie-extendedoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 400; + font-stretch: normal; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-italic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-italic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 400; + font-stretch: expanded; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-extendeditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendeditalic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 500; + font-stretch: normal; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-medium.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-medium.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 500; + font-stretch: expanded; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-extendedmedium.woff2') format('woff2'), 
url('/static/ttf/iosevk-abbie-extendedmedium.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 500; + font-stretch: normal; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-mediumoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-mediumoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 500; + font-stretch: normal; + src: url('/static/woff2/iosevk-abbie-mediumoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-mediumoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 500; + font-stretch: expanded; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-extendedmediumoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedmediumoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 500; + font-stretch: expanded; + src: url('/static/woff2/iosevk-abbie-extendedmediumoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedmediumoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 500; + font-stretch: normal; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-mediumitalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-mediumitalic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 500; + font-stretch: expanded; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-extendedmediumitalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedmediumitalic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 600; + font-stretch: normal; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-semibold.woff2') format('woff2'), 
url('/static/ttf/iosevk-abbie-semibold.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 600; + font-stretch: expanded; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-extendedsemibold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedsemibold.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 600; + font-stretch: normal; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-semiboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-semiboldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 600; + font-stretch: normal; + src: url('/static/woff2/iosevk-abbie-semiboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-semiboldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 600; + font-stretch: expanded; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-extendedsemiboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedsemiboldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 600; + font-stretch: expanded; + src: url('/static/woff2/iosevk-abbie-extendedsemiboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedsemiboldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 600; + font-stretch: normal; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-semibolditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-semibolditalic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 600; + font-stretch: expanded; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-extendedsemibolditalic.woff2') 
format('woff2'), url('/static/ttf/iosevk-abbie-extendedsemibolditalic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 700; + font-stretch: normal; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-bold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-bold.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 700; + font-stretch: expanded; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-extendedbold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedbold.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 700; + font-stretch: normal; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-boldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-boldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 700; + font-stretch: normal; + src: url('/static/woff2/iosevk-abbie-boldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-boldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 700; + font-stretch: expanded; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-extendedboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedboldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 700; + font-stretch: expanded; + src: url('/static/woff2/iosevk-abbie-extendedboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedboldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 700; + font-stretch: normal; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-bolditalic.woff2') format('woff2'), 
url('/static/ttf/iosevk-abbie-bolditalic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 700; + font-stretch: expanded; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-extendedbolditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedbolditalic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 800; + font-stretch: normal; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-extrabold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extrabold.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 800; + font-stretch: expanded; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-extendedextrabold.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedextrabold.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 800; + font-stretch: normal; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-extraboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extraboldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 800; + font-stretch: normal; + src: url('/static/woff2/iosevk-abbie-extraboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extraboldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 800; + font-stretch: expanded; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-extendedextraboldoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedextraboldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 800; + font-stretch: expanded; + src: url('/static/woff2/iosevk-abbie-extendedextraboldoblique.woff2') 
format('woff2'), url('/static/ttf/iosevk-abbie-extendedextraboldoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 800; + font-stretch: normal; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-extrabolditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extrabolditalic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 800; + font-stretch: expanded; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-extendedextrabolditalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedextrabolditalic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 900; + font-stretch: normal; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-heavy.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-heavy.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 900; + font-stretch: expanded; + font-style: normal; + src: url('/static/woff2/iosevk-abbie-extendedheavy.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedheavy.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 900; + font-stretch: normal; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-heavyoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-heavyoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 900; + font-stretch: normal; + src: url('/static/woff2/iosevk-abbie-heavyoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-heavyoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 900; + font-stretch: expanded; + font-style: oblique; + src: url('/static/woff2/iosevk-abbie-extendedheavyoblique.woff2') format('woff2'), 
url('/static/ttf/iosevk-abbie-extendedheavyoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka Oblique'; + font-display: swap; + font-weight: 900; + font-stretch: expanded; + src: url('/static/woff2/iosevk-abbie-extendedheavyoblique.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedheavyoblique.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 900; + font-stretch: normal; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-heavyitalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-heavyitalic.ttf') format('truetype'); +} + +@font-face { + font-family: 'Iosevka'; + font-display: swap; + font-weight: 900; + font-stretch: expanded; + font-style: italic; + src: url('/static/woff2/iosevk-abbie-extendedheavyitalic.woff2') format('woff2'), url('/static/ttf/iosevk-abbie-extendedheavyitalic.ttf') format('truetype'); +} diff --git a/css/katex.min.css b/css/katex.min.css new file mode 100644 index 0000000..9655d47 --- /dev/null +++ b/css/katex.min.css @@ -0,0 +1 @@ +@font-face{font-family:KaTeX_AMS;font-style:normal;font-weight:400;src:url(fonts/KaTeX_AMS-Regular.woff2) format("woff2"),url(fonts/KaTeX_AMS-Regular.woff) format("woff"),url(fonts/KaTeX_AMS-Regular.ttf) format("truetype")}@font-face{font-family:KaTeX_Caligraphic;font-style:normal;font-weight:700;src:url(fonts/KaTeX_Caligraphic-Bold.woff2) format("woff2"),url(fonts/KaTeX_Caligraphic-Bold.woff) format("woff"),url(fonts/KaTeX_Caligraphic-Bold.ttf) format("truetype")}@font-face{font-family:KaTeX_Caligraphic;font-style:normal;font-weight:400;src:url(fonts/KaTeX_Caligraphic-Regular.woff2) format("woff2"),url(fonts/KaTeX_Caligraphic-Regular.woff) format("woff"),url(fonts/KaTeX_Caligraphic-Regular.ttf) format("truetype")}@font-face{font-family:KaTeX_Fraktur;font-style:normal;font-weight:700;src:url(fonts/KaTeX_Fraktur-Bold.woff2) format("woff2"),url(fonts/KaTeX_Fraktur-Bold.woff) 
format("woff"),url(fonts/KaTeX_Fraktur-Bold.ttf) format("truetype")}@font-face{font-family:KaTeX_Fraktur;font-style:normal;font-weight:400;src:url(fonts/KaTeX_Fraktur-Regular.woff2) format("woff2"),url(fonts/KaTeX_Fraktur-Regular.woff) format("woff"),url(fonts/KaTeX_Fraktur-Regular.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:normal;font-weight:700;src:url(fonts/KaTeX_Main-Bold.woff2) format("woff2"),url(fonts/KaTeX_Main-Bold.woff) format("woff"),url(fonts/KaTeX_Main-Bold.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:italic;font-weight:700;src:url(fonts/KaTeX_Main-BoldItalic.woff2) format("woff2"),url(fonts/KaTeX_Main-BoldItalic.woff) format("woff"),url(fonts/KaTeX_Main-BoldItalic.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:italic;font-weight:400;src:url(fonts/KaTeX_Main-Italic.woff2) format("woff2"),url(fonts/KaTeX_Main-Italic.woff) format("woff"),url(fonts/KaTeX_Main-Italic.ttf) format("truetype")}@font-face{font-family:KaTeX_Main;font-style:normal;font-weight:400;src:url(fonts/KaTeX_Main-Regular.woff2) format("woff2"),url(fonts/KaTeX_Main-Regular.woff) format("woff"),url(fonts/KaTeX_Main-Regular.ttf) format("truetype")}@font-face{font-family:KaTeX_Math;font-style:italic;font-weight:700;src:url(fonts/KaTeX_Math-BoldItalic.woff2) format("woff2"),url(fonts/KaTeX_Math-BoldItalic.woff) format("woff"),url(fonts/KaTeX_Math-BoldItalic.ttf) format("truetype")}@font-face{font-family:KaTeX_Math;font-style:italic;font-weight:400;src:url(fonts/KaTeX_Math-Italic.woff2) format("woff2"),url(fonts/KaTeX_Math-Italic.woff) format("woff"),url(fonts/KaTeX_Math-Italic.ttf) format("truetype")}@font-face{font-family:"KaTeX_SansSerif";font-style:normal;font-weight:700;src:url(fonts/KaTeX_SansSerif-Bold.woff2) format("woff2"),url(fonts/KaTeX_SansSerif-Bold.woff) format("woff"),url(fonts/KaTeX_SansSerif-Bold.ttf) 
format("truetype")}@font-face{font-family:"KaTeX_SansSerif";font-style:italic;font-weight:400;src:url(fonts/KaTeX_SansSerif-Italic.woff2) format("woff2"),url(fonts/KaTeX_SansSerif-Italic.woff) format("woff"),url(fonts/KaTeX_SansSerif-Italic.ttf) format("truetype")}@font-face{font-family:"KaTeX_SansSerif";font-style:normal;font-weight:400;src:url(fonts/KaTeX_SansSerif-Regular.woff2) format("woff2"),url(fonts/KaTeX_SansSerif-Regular.woff) format("woff"),url(fonts/KaTeX_SansSerif-Regular.ttf) format("truetype")}@font-face{font-family:KaTeX_Script;font-style:normal;font-weight:400;src:url(fonts/KaTeX_Script-Regular.woff2) format("woff2"),url(fonts/KaTeX_Script-Regular.woff) format("woff"),url(fonts/KaTeX_Script-Regular.ttf) format("truetype")}@font-face{font-family:KaTeX_Size1;font-style:normal;font-weight:400;src:url(fonts/KaTeX_Size1-Regular.woff2) format("woff2"),url(fonts/KaTeX_Size1-Regular.woff) format("woff"),url(fonts/KaTeX_Size1-Regular.ttf) format("truetype")}@font-face{font-family:KaTeX_Size2;font-style:normal;font-weight:400;src:url(fonts/KaTeX_Size2-Regular.woff2) format("woff2"),url(fonts/KaTeX_Size2-Regular.woff) format("woff"),url(fonts/KaTeX_Size2-Regular.ttf) format("truetype")}@font-face{font-family:KaTeX_Size3;font-style:normal;font-weight:400;src:url(fonts/KaTeX_Size3-Regular.woff2) format("woff2"),url(fonts/KaTeX_Size3-Regular.woff) format("woff"),url(fonts/KaTeX_Size3-Regular.ttf) format("truetype")}@font-face{font-family:KaTeX_Size4;font-style:normal;font-weight:400;src:url(fonts/KaTeX_Size4-Regular.woff2) format("woff2"),url(fonts/KaTeX_Size4-Regular.woff) format("woff"),url(fonts/KaTeX_Size4-Regular.ttf) format("truetype")}@font-face{font-family:KaTeX_Typewriter;font-style:normal;font-weight:400;src:url(fonts/KaTeX_Typewriter-Regular.woff2) format("woff2"),url(fonts/KaTeX_Typewriter-Regular.woff) format("woff"),url(fonts/KaTeX_Typewriter-Regular.ttf) format("truetype")}.katex{font:normal 1.21em KaTeX_Main,Times New 
Roman,serif;line-height:1.2;text-indent:0;text-rendering:auto}.katex *{-ms-high-contrast-adjust:none!important;border-color:currentColor}.katex .katex-version:after{content:"0.13.11"}.katex .katex-mathml{clip:rect(1px,1px,1px,1px);border:0;height:1px;overflow:hidden;padding:0;position:absolute;width:1px}.katex .katex-html>.newline{display:block}.katex .base{position:relative;white-space:nowrap;width:-webkit-min-content;width:-moz-min-content;width:min-content}.katex .base,.katex .strut{display:inline-block}.katex .textbf{font-weight:700}.katex .textit{font-style:italic}.katex .textrm{font-family:KaTeX_Main}.katex .textsf{font-family:KaTeX_SansSerif}.katex .texttt{font-family:KaTeX_Typewriter}.katex .mathnormal{font-family:KaTeX_Math;font-style:italic}.katex .mathit{font-family:KaTeX_Main;font-style:italic}.katex .mathrm{font-style:normal}.katex .mathbf{font-family:KaTeX_Main;font-weight:700}.katex .boldsymbol{font-family:KaTeX_Math;font-style:italic;font-weight:700}.katex .amsrm,.katex .mathbb,.katex .textbb{font-family:KaTeX_AMS}.katex .mathcal{font-family:KaTeX_Caligraphic}.katex .mathfrak,.katex .textfrak{font-family:KaTeX_Fraktur}.katex .mathtt{font-family:KaTeX_Typewriter}.katex .mathscr,.katex .textscr{font-family:KaTeX_Script}.katex .mathsf,.katex .textsf{font-family:KaTeX_SansSerif}.katex .mathboldsf,.katex .textboldsf{font-family:KaTeX_SansSerif;font-weight:700}.katex .mathitsf,.katex .textitsf{font-family:KaTeX_SansSerif;font-style:italic}.katex .mainrm{font-family:KaTeX_Main;font-style:normal}.katex .vlist-t{border-collapse:collapse;display:inline-table;table-layout:fixed}.katex .vlist-r{display:table-row}.katex .vlist{display:table-cell;position:relative;vertical-align:bottom}.katex .vlist>span{display:block;height:0;position:relative}.katex .vlist>span>span{display:inline-block}.katex .vlist>span>.pstrut{overflow:hidden;width:0}.katex .vlist-t2{margin-right:-2px}.katex 
.vlist-s{display:table-cell;font-size:1px;min-width:2px;vertical-align:bottom;width:2px}.katex .vbox{-webkit-box-orient:vertical;-webkit-box-direction:normal;-webkit-box-align:baseline;align-items:baseline;display:-webkit-inline-box;display:inline-flex;flex-direction:column}.katex .hbox{width:100%}.katex .hbox,.katex .thinbox{-webkit-box-orient:horizontal;-webkit-box-direction:normal;display:-webkit-inline-box;display:inline-flex;flex-direction:row}.katex .thinbox{max-width:0;width:0}.katex .msupsub{text-align:left}.katex .mfrac>span>span{text-align:center}.katex .mfrac .frac-line{border-bottom-style:solid;display:inline-block;width:100%}.katex .hdashline,.katex .hline,.katex .mfrac .frac-line,.katex .overline .overline-line,.katex .rule,.katex .underline .underline-line{min-height:1px}.katex .mspace{display:inline-block}.katex .clap,.katex .llap,.katex .rlap{position:relative;width:0}.katex .clap>.inner,.katex .llap>.inner,.katex .rlap>.inner{position:absolute}.katex .clap>.fix,.katex .llap>.fix,.katex .rlap>.fix{display:inline-block}.katex .llap>.inner{right:0}.katex .clap>.inner,.katex .rlap>.inner{left:0}.katex .clap>.inner>span{margin-left:-50%;margin-right:50%}.katex .rule{border:0 solid;display:inline-block;position:relative}.katex .hline,.katex .overline .overline-line,.katex .underline .underline-line{border-bottom-style:solid;display:inline-block;width:100%}.katex .hdashline{border-bottom-style:dashed;display:inline-block;width:100%}.katex .sqrt>.root{margin-left:.27777778em;margin-right:-.55555556em}.katex .fontsize-ensurer.reset-size1.size1,.katex .sizing.reset-size1.size1{font-size:1em}.katex .fontsize-ensurer.reset-size1.size2,.katex .sizing.reset-size1.size2{font-size:1.2em}.katex .fontsize-ensurer.reset-size1.size3,.katex .sizing.reset-size1.size3{font-size:1.4em}.katex .fontsize-ensurer.reset-size1.size4,.katex .sizing.reset-size1.size4{font-size:1.6em}.katex .fontsize-ensurer.reset-size1.size5,.katex 
.sizing.reset-size1.size5{font-size:1.8em}.katex .fontsize-ensurer.reset-size1.size6,.katex .sizing.reset-size1.size6{font-size:2em}.katex .fontsize-ensurer.reset-size1.size7,.katex .sizing.reset-size1.size7{font-size:2.4em}.katex .fontsize-ensurer.reset-size1.size8,.katex .sizing.reset-size1.size8{font-size:2.88em}.katex .fontsize-ensurer.reset-size1.size9,.katex .sizing.reset-size1.size9{font-size:3.456em}.katex .fontsize-ensurer.reset-size1.size10,.katex .sizing.reset-size1.size10{font-size:4.148em}.katex .fontsize-ensurer.reset-size1.size11,.katex .sizing.reset-size1.size11{font-size:4.976em}.katex .fontsize-ensurer.reset-size2.size1,.katex .sizing.reset-size2.size1{font-size:.83333333em}.katex .fontsize-ensurer.reset-size2.size2,.katex .sizing.reset-size2.size2{font-size:1em}.katex .fontsize-ensurer.reset-size2.size3,.katex .sizing.reset-size2.size3{font-size:1.16666667em}.katex .fontsize-ensurer.reset-size2.size4,.katex .sizing.reset-size2.size4{font-size:1.33333333em}.katex .fontsize-ensurer.reset-size2.size5,.katex .sizing.reset-size2.size5{font-size:1.5em}.katex .fontsize-ensurer.reset-size2.size6,.katex .sizing.reset-size2.size6{font-size:1.66666667em}.katex .fontsize-ensurer.reset-size2.size7,.katex .sizing.reset-size2.size7{font-size:2em}.katex .fontsize-ensurer.reset-size2.size8,.katex .sizing.reset-size2.size8{font-size:2.4em}.katex .fontsize-ensurer.reset-size2.size9,.katex .sizing.reset-size2.size9{font-size:2.88em}.katex .fontsize-ensurer.reset-size2.size10,.katex .sizing.reset-size2.size10{font-size:3.45666667em}.katex .fontsize-ensurer.reset-size2.size11,.katex .sizing.reset-size2.size11{font-size:4.14666667em}.katex .fontsize-ensurer.reset-size3.size1,.katex .sizing.reset-size3.size1{font-size:.71428571em}.katex .fontsize-ensurer.reset-size3.size2,.katex .sizing.reset-size3.size2{font-size:.85714286em}.katex .fontsize-ensurer.reset-size3.size3,.katex .sizing.reset-size3.size3{font-size:1em}.katex .fontsize-ensurer.reset-size3.size4,.katex 
.sizing.reset-size3.size4{font-size:1.14285714em}.katex .fontsize-ensurer.reset-size3.size5,.katex .sizing.reset-size3.size5{font-size:1.28571429em}.katex .fontsize-ensurer.reset-size3.size6,.katex .sizing.reset-size3.size6{font-size:1.42857143em}.katex .fontsize-ensurer.reset-size3.size7,.katex .sizing.reset-size3.size7{font-size:1.71428571em}.katex .fontsize-ensurer.reset-size3.size8,.katex .sizing.reset-size3.size8{font-size:2.05714286em}.katex .fontsize-ensurer.reset-size3.size9,.katex .sizing.reset-size3.size9{font-size:2.46857143em}.katex .fontsize-ensurer.reset-size3.size10,.katex .sizing.reset-size3.size10{font-size:2.96285714em}.katex .fontsize-ensurer.reset-size3.size11,.katex .sizing.reset-size3.size11{font-size:3.55428571em}.katex .fontsize-ensurer.reset-size4.size1,.katex .sizing.reset-size4.size1{font-size:.625em}.katex .fontsize-ensurer.reset-size4.size2,.katex .sizing.reset-size4.size2{font-size:.75em}.katex .fontsize-ensurer.reset-size4.size3,.katex .sizing.reset-size4.size3{font-size:.875em}.katex .fontsize-ensurer.reset-size4.size4,.katex .sizing.reset-size4.size4{font-size:1em}.katex .fontsize-ensurer.reset-size4.size5,.katex .sizing.reset-size4.size5{font-size:1.125em}.katex .fontsize-ensurer.reset-size4.size6,.katex .sizing.reset-size4.size6{font-size:1.25em}.katex .fontsize-ensurer.reset-size4.size7,.katex .sizing.reset-size4.size7{font-size:1.5em}.katex .fontsize-ensurer.reset-size4.size8,.katex .sizing.reset-size4.size8{font-size:1.8em}.katex .fontsize-ensurer.reset-size4.size9,.katex .sizing.reset-size4.size9{font-size:2.16em}.katex .fontsize-ensurer.reset-size4.size10,.katex .sizing.reset-size4.size10{font-size:2.5925em}.katex .fontsize-ensurer.reset-size4.size11,.katex .sizing.reset-size4.size11{font-size:3.11em}.katex .fontsize-ensurer.reset-size5.size1,.katex .sizing.reset-size5.size1{font-size:.55555556em}.katex .fontsize-ensurer.reset-size5.size2,.katex .sizing.reset-size5.size2{font-size:.66666667em}.katex 
.fontsize-ensurer.reset-size5.size3,.katex .sizing.reset-size5.size3{font-size:.77777778em}.katex .fontsize-ensurer.reset-size5.size4,.katex .sizing.reset-size5.size4{font-size:.88888889em}.katex .fontsize-ensurer.reset-size5.size5,.katex .sizing.reset-size5.size5{font-size:1em}.katex .fontsize-ensurer.reset-size5.size6,.katex .sizing.reset-size5.size6{font-size:1.11111111em}.katex .fontsize-ensurer.reset-size5.size7,.katex .sizing.reset-size5.size7{font-size:1.33333333em}.katex .fontsize-ensurer.reset-size5.size8,.katex .sizing.reset-size5.size8{font-size:1.6em}.katex .fontsize-ensurer.reset-size5.size9,.katex .sizing.reset-size5.size9{font-size:1.92em}.katex .fontsize-ensurer.reset-size5.size10,.katex .sizing.reset-size5.size10{font-size:2.30444444em}.katex .fontsize-ensurer.reset-size5.size11,.katex .sizing.reset-size5.size11{font-size:2.76444444em}.katex .fontsize-ensurer.reset-size6.size1,.katex .sizing.reset-size6.size1{font-size:.5em}.katex .fontsize-ensurer.reset-size6.size2,.katex .sizing.reset-size6.size2{font-size:.6em}.katex .fontsize-ensurer.reset-size6.size3,.katex .sizing.reset-size6.size3{font-size:.7em}.katex .fontsize-ensurer.reset-size6.size4,.katex .sizing.reset-size6.size4{font-size:.8em}.katex .fontsize-ensurer.reset-size6.size5,.katex .sizing.reset-size6.size5{font-size:.9em}.katex .fontsize-ensurer.reset-size6.size6,.katex .sizing.reset-size6.size6{font-size:1em}.katex .fontsize-ensurer.reset-size6.size7,.katex .sizing.reset-size6.size7{font-size:1.2em}.katex .fontsize-ensurer.reset-size6.size8,.katex .sizing.reset-size6.size8{font-size:1.44em}.katex .fontsize-ensurer.reset-size6.size9,.katex .sizing.reset-size6.size9{font-size:1.728em}.katex .fontsize-ensurer.reset-size6.size10,.katex .sizing.reset-size6.size10{font-size:2.074em}.katex .fontsize-ensurer.reset-size6.size11,.katex .sizing.reset-size6.size11{font-size:2.488em}.katex .fontsize-ensurer.reset-size7.size1,.katex .sizing.reset-size7.size1{font-size:.41666667em}.katex 
.fontsize-ensurer.reset-size7.size2,.katex .sizing.reset-size7.size2{font-size:.5em}.katex .fontsize-ensurer.reset-size7.size3,.katex .sizing.reset-size7.size3{font-size:.58333333em}.katex .fontsize-ensurer.reset-size7.size4,.katex .sizing.reset-size7.size4{font-size:.66666667em}.katex .fontsize-ensurer.reset-size7.size5,.katex .sizing.reset-size7.size5{font-size:.75em}.katex .fontsize-ensurer.reset-size7.size6,.katex .sizing.reset-size7.size6{font-size:.83333333em}.katex .fontsize-ensurer.reset-size7.size7,.katex .sizing.reset-size7.size7{font-size:1em}.katex .fontsize-ensurer.reset-size7.size8,.katex .sizing.reset-size7.size8{font-size:1.2em}.katex .fontsize-ensurer.reset-size7.size9,.katex .sizing.reset-size7.size9{font-size:1.44em}.katex .fontsize-ensurer.reset-size7.size10,.katex .sizing.reset-size7.size10{font-size:1.72833333em}.katex .fontsize-ensurer.reset-size7.size11,.katex .sizing.reset-size7.size11{font-size:2.07333333em}.katex .fontsize-ensurer.reset-size8.size1,.katex .sizing.reset-size8.size1{font-size:.34722222em}.katex .fontsize-ensurer.reset-size8.size2,.katex .sizing.reset-size8.size2{font-size:.41666667em}.katex .fontsize-ensurer.reset-size8.size3,.katex .sizing.reset-size8.size3{font-size:.48611111em}.katex .fontsize-ensurer.reset-size8.size4,.katex .sizing.reset-size8.size4{font-size:.55555556em}.katex .fontsize-ensurer.reset-size8.size5,.katex .sizing.reset-size8.size5{font-size:.625em}.katex .fontsize-ensurer.reset-size8.size6,.katex .sizing.reset-size8.size6{font-size:.69444444em}.katex .fontsize-ensurer.reset-size8.size7,.katex .sizing.reset-size8.size7{font-size:.83333333em}.katex .fontsize-ensurer.reset-size8.size8,.katex .sizing.reset-size8.size8{font-size:1em}.katex .fontsize-ensurer.reset-size8.size9,.katex .sizing.reset-size8.size9{font-size:1.2em}.katex .fontsize-ensurer.reset-size8.size10,.katex .sizing.reset-size8.size10{font-size:1.44027778em}.katex .fontsize-ensurer.reset-size8.size11,.katex 
.sizing.reset-size8.size11{font-size:1.72777778em}.katex .fontsize-ensurer.reset-size9.size1,.katex .sizing.reset-size9.size1{font-size:.28935185em}.katex .fontsize-ensurer.reset-size9.size2,.katex .sizing.reset-size9.size2{font-size:.34722222em}.katex .fontsize-ensurer.reset-size9.size3,.katex .sizing.reset-size9.size3{font-size:.40509259em}.katex .fontsize-ensurer.reset-size9.size4,.katex .sizing.reset-size9.size4{font-size:.46296296em}.katex .fontsize-ensurer.reset-size9.size5,.katex .sizing.reset-size9.size5{font-size:.52083333em}.katex .fontsize-ensurer.reset-size9.size6,.katex .sizing.reset-size9.size6{font-size:.5787037em}.katex .fontsize-ensurer.reset-size9.size7,.katex .sizing.reset-size9.size7{font-size:.69444444em}.katex .fontsize-ensurer.reset-size9.size8,.katex .sizing.reset-size9.size8{font-size:.83333333em}.katex .fontsize-ensurer.reset-size9.size9,.katex .sizing.reset-size9.size9{font-size:1em}.katex .fontsize-ensurer.reset-size9.size10,.katex .sizing.reset-size9.size10{font-size:1.20023148em}.katex .fontsize-ensurer.reset-size9.size11,.katex .sizing.reset-size9.size11{font-size:1.43981481em}.katex .fontsize-ensurer.reset-size10.size1,.katex .sizing.reset-size10.size1{font-size:.24108004em}.katex .fontsize-ensurer.reset-size10.size2,.katex .sizing.reset-size10.size2{font-size:.28929605em}.katex .fontsize-ensurer.reset-size10.size3,.katex .sizing.reset-size10.size3{font-size:.33751205em}.katex .fontsize-ensurer.reset-size10.size4,.katex .sizing.reset-size10.size4{font-size:.38572806em}.katex .fontsize-ensurer.reset-size10.size5,.katex .sizing.reset-size10.size5{font-size:.43394407em}.katex .fontsize-ensurer.reset-size10.size6,.katex .sizing.reset-size10.size6{font-size:.48216008em}.katex .fontsize-ensurer.reset-size10.size7,.katex .sizing.reset-size10.size7{font-size:.57859209em}.katex .fontsize-ensurer.reset-size10.size8,.katex .sizing.reset-size10.size8{font-size:.69431051em}.katex .fontsize-ensurer.reset-size10.size9,.katex 
.sizing.reset-size10.size9{font-size:.83317261em}.katex .fontsize-ensurer.reset-size10.size10,.katex .sizing.reset-size10.size10{font-size:1em}.katex .fontsize-ensurer.reset-size10.size11,.katex .sizing.reset-size10.size11{font-size:1.19961427em}.katex .fontsize-ensurer.reset-size11.size1,.katex .sizing.reset-size11.size1{font-size:.20096463em}.katex .fontsize-ensurer.reset-size11.size2,.katex .sizing.reset-size11.size2{font-size:.24115756em}.katex .fontsize-ensurer.reset-size11.size3,.katex .sizing.reset-size11.size3{font-size:.28135048em}.katex .fontsize-ensurer.reset-size11.size4,.katex .sizing.reset-size11.size4{font-size:.32154341em}.katex .fontsize-ensurer.reset-size11.size5,.katex .sizing.reset-size11.size5{font-size:.36173633em}.katex .fontsize-ensurer.reset-size11.size6,.katex .sizing.reset-size11.size6{font-size:.40192926em}.katex .fontsize-ensurer.reset-size11.size7,.katex .sizing.reset-size11.size7{font-size:.48231511em}.katex .fontsize-ensurer.reset-size11.size8,.katex .sizing.reset-size11.size8{font-size:.57877814em}.katex .fontsize-ensurer.reset-size11.size9,.katex .sizing.reset-size11.size9{font-size:.69453376em}.katex .fontsize-ensurer.reset-size11.size10,.katex .sizing.reset-size11.size10{font-size:.83360129em}.katex .fontsize-ensurer.reset-size11.size11,.katex .sizing.reset-size11.size11{font-size:1em}.katex .delimsizing.size1{font-family:KaTeX_Size1}.katex .delimsizing.size2{font-family:KaTeX_Size2}.katex .delimsizing.size3{font-family:KaTeX_Size3}.katex .delimsizing.size4{font-family:KaTeX_Size4}.katex .delimsizing.mult .delim-size1>span{font-family:KaTeX_Size1}.katex .delimsizing.mult .delim-size4>span{font-family:KaTeX_Size4}.katex .nulldelimiter{display:inline-block;width:.12em}.katex .delimcenter,.katex .op-symbol{position:relative}.katex .op-symbol.small-op{font-family:KaTeX_Size1}.katex .op-symbol.large-op{font-family:KaTeX_Size2}.katex .accent>.vlist-t,.katex .op-limits>.vlist-t{text-align:center}.katex .accent 
.accent-body{position:relative}.katex .accent .accent-body:not(.accent-full){width:0}.katex .overlay{display:block}.katex .mtable .vertical-separator{display:inline-block;min-width:1px}.katex .mtable .arraycolsep{display:inline-block}.katex .mtable .col-align-c>.vlist-t{text-align:center}.katex .mtable .col-align-l>.vlist-t{text-align:left}.katex .mtable .col-align-r>.vlist-t{text-align:right}.katex .svg-align{text-align:left}.katex svg{fill:currentColor;stroke:currentColor;fill-rule:nonzero;fill-opacity:1;stroke-width:1;stroke-linecap:butt;stroke-linejoin:miter;stroke-miterlimit:4;stroke-dasharray:none;stroke-dashoffset:0;stroke-opacity:1;display:block;height:inherit;position:absolute;width:100%}.katex svg path{stroke:none}.katex img{border-style:none;max-height:none;max-width:none;min-height:0;min-width:0}.katex .stretchy{display:block;overflow:hidden;position:relative;width:100%}.katex .stretchy:after,.katex .stretchy:before{content:""}.katex .hide-tail{overflow:hidden;position:relative;width:100%}.katex .halfarrow-left{left:0;overflow:hidden;position:absolute;width:50.2%}.katex .halfarrow-right{overflow:hidden;position:absolute;right:0;width:50.2%}.katex .brace-left{left:0;overflow:hidden;position:absolute;width:25.1%}.katex .brace-center{left:25%;overflow:hidden;position:absolute;width:50%}.katex .brace-right{overflow:hidden;position:absolute;right:0;width:25.1%}.katex .x-arrow-pad{padding:0 .5em}.katex .cd-arrow-pad{padding:0 .55556em 0 .27778em}.katex .mover,.katex .munder,.katex .x-arrow{text-align:center}.katex .boxpad{padding:0 .3em}.katex .fbox,.katex .fcolorbox{border:.04em solid;box-sizing:border-box}.katex .cancel-pad{padding:0 .2em}.katex .cancel-lap{margin-left:-.2em;margin-right:-.2em}.katex .sout{border-bottom-style:solid;border-bottom-width:.08em}.katex .angl{border-right:.049em solid;border-top:.049em solid;box-sizing:border-content;margin-right:.03889em}.katex .anglpad{padding:0 .03889em}.katex .eqn-num:before{content:"(" counter(katexEqnNo) 
")";counter-increment:katexEqnNo}.katex .mml-eqn-num:before{content:"(" counter(mmlEqnNo) ")";counter-increment:mmlEqnNo}.katex .mtr-glue{width:50%}.katex .cd-vert-arrow{display:inline-block;position:relative}.katex .cd-label-left{display:inline-block;position:absolute;right:-webkit-calc(50% + .3em);right:calc(50% + .3em);text-align:left}.katex .cd-label-right{display:inline-block;left:-webkit-calc(50% + .3em);left:calc(50% + .3em);position:absolute;text-align:right}.katex-display{display:block;margin:1em 0;text-align:center}.katex-display>.katex{display:block;text-align:center;white-space:nowrap}.katex-display>.katex>.katex-html{display:block;position:relative}.katex-display>.katex>.katex-html>.tag{position:absolute;right:0}.katex-display.leqno>.katex>.katex-html>.tag{left:0;right:auto}.katex-display.fleqn>.katex{padding-left:2em;text-align:left}body{counter-reset:katexEqnNo mmlEqnNo} diff --git a/css/vars.scss b/css/vars.scss new file mode 100644 index 0000000..013b3b7 --- /dev/null +++ b/css/vars.scss @@ -0,0 +1,62 @@ +$purple-50: #faf5ff; +$purple-100: #f3e8ff; +$purple-200: #e9d5ff; +$purple-300: #d8b4fe; +$purple-400: #c084fc; +$purple-500: #a855f7; +$purple-600: #9333ea; +$purple-700: #7e22ce; +$purple-800: #6b21a8; +$purple-900: #581c87; + +$yellow-50: #fefce8; +$yellow-100: #fef9c3; +$yellow-200: #fef08a; +$yellow-300: #fde047; +$yellow-400: #facc15; +$yellow-500: #eab308; +$yellow-600: #ca8a04; +$yellow-700: #a16207; +$yellow-800: #854d0e; +$yellow-900: #713f12; + +$bluegray-50: #f8fafc; +$bluegray-100: #f1f5f9; +$bluegray-200: #e2e8f0; +$bluegray-300: #cbd5e1; +$bluegray-400: #94a3b8; +$bluegray-500: #64748b; +$bluegray-600: #475569; +$bluegray-700: #334155; +$bluegray-800: #1e293b; +$bluegray-900: #0f172a; + +$red-50: #fef2f2; +$red-100: #fee2e2; +$red-200: #fecaca; +$red-300: #fca5a5; +$red-400: #f87171; +$red-500: #ef4444; +$red-600: #dc2626; +$red-700: #b91c1c; +$red-800: #991b1b; +$red-900: #7f1d1d; + +$nav-height: 48px; + +$font-size: 14pt; + 
+$code-bg: #282C34; +$code-fg: #ABB2BF; +$code-red: #D65122; +$code-red-br: #AE3B36; +$code-green: #88B966; +$code-yellow: #DEB468; +$code-orange: #C58853; +$code-blue: #519DEB; +$code-pink: #C678DD; +$code-cyan: #48A8B5; +$code-white: #ABB2BF; +$code-grey: #7F848E; + +// foo \ No newline at end of file diff --git a/default.nix b/default.nix new file mode 100644 index 0000000..fd905bc --- /dev/null +++ b/default.nix @@ -0,0 +1,27 @@ +{ pkgs ? import { }, stdenv ? pkgs.stdenv }: +let + site = pkgs.haskellPackages.callCabal2nix "blag-site" ./. { }; + our-texlive = with pkgs; texlive.combine { + inherit (texlive) + collection-basic + collection-latex + xcolor + preview + pgf tikz-cd + mathpazo + varwidth xkeyval standalone + jknapltx; + }; + +in + stdenv.mkDerivation { + name = "blag"; + src = ./.; + buildInputs = with pkgs; [ + poppler_utils + rubber + nodePackages.katex + our-texlive + site + ]; + } \ No newline at end of file diff --git a/diagrams/cc/delimcc.tex b/diagrams/cc/delimcc.tex new file mode 100644 index 0000000..e380944 --- /dev/null +++ b/diagrams/cc/delimcc.tex @@ -0,0 +1,15 @@ +\begin{scope}[node distance=0.75cm] + +\node (Stk0) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, 0) {shift}; +\node (Stk1) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -0.75) {foo}; +\node (Stk2) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -1.5) {bar}; +\node (Stk3) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -2.25) {reset}; +\node (Stk3) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -3) {baz}; +\draw [red, very thick, dashed] (-3.6, -2.625) -- (-1.89, -2.625) -- (-1.89, 0.375) -- (-3.6, 0.375) -- cycle; +\draw [arrows={Latex}-] (-4, 0.375) -- (-4, -3.375); +\end{scope} \ No newline at end of file diff --git 
a/diagrams/ctt/axes.tex b/diagrams/ctt/axes.tex new file mode 100644 index 0000000..1364835 --- /dev/null +++ b/diagrams/ctt/axes.tex @@ -0,0 +1,2 @@ +\draw[->,thick] (0,0)--(1,0) node[midway,below]{\large{i}}; +\draw[->,thick] (0,0)--(0,1) node[midway,left]{\large{j}}; diff --git a/diagrams/ctt/circle.tex b/diagrams/ctt/circle.tex new file mode 100644 index 0000000..74d0709 --- /dev/null +++ b/diagrams/ctt/circle.tex @@ -0,0 +1,3 @@ +\node[draw,circle,label=below:{$\mathrm{base}$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (a0) at (0, -1) {}; +\draw[->] (0, 0) circle (1cm); +\node[] (loop) at (0, 0) {$\mathrm{loop}\ i$}; \ No newline at end of file diff --git a/diagrams/ctt/comp_path.tex b/diagrams/ctt/comp_path.tex new file mode 100644 index 0000000..ab24c35 --- /dev/null +++ b/diagrams/ctt/comp_path.tex @@ -0,0 +1,11 @@ +\node[] (i0j0) at (-1, -1) {x}; +\node[] (i1j0) at (1, -1) {y}; +\node[] (i0j1) at (-1, 1) {x}; +\node[] (i1j1) at (1, 1) {z}; + +\node (in) at (0, 0) {}; + +\draw[->] (i0j0) -- (i0j1) node [midway] {$a$}; +\draw[->] (i0j0) -- (i1j0) node [midway, below] {$p(i)$}; +\draw[->,dashed] (i0j1) -- (i1j1) node [midway] {}; +\draw[->] (i1j0) -- (i1j1) node [midway, right] {$q(j)$}; \ No newline at end of file diff --git a/diagrams/ctt/eq_i0_i1.tex b/diagrams/ctt/eq_i0_i1.tex new file mode 100644 index 0000000..86054e2 --- /dev/null +++ b/diagrams/ctt/eq_i0_i1.tex @@ -0,0 +1,4 @@ +\node[draw,circle,label=left:{$i0$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i0) at (-1, 0) {}; +\node[draw,circle,label=right:{$i1$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i1) at (1, 0) {}; + +\draw[->] (i0) -- (i1) node [midway] {$\lambda i. 
i$}; \ No newline at end of file diff --git a/diagrams/ctt/land_connection.tex b/diagrams/ctt/land_connection.tex new file mode 100644 index 0000000..6f8f6ea --- /dev/null +++ b/diagrams/ctt/land_connection.tex @@ -0,0 +1,11 @@ +\node[] (i0j0) at (-1, -1) {a}; +\node[] (i1j0) at (1, -1) {a}; +\node[] (i0j1) at (-1, 1) {a}; +\node[] (i1j1) at (1, 1) {b}; + +\node (in) at (0, 0) {$\lambda i j. p (i \land j)$}; + +\draw[->] (i0j0) -- (i0j1) node [midway] {$\lambda j. p\ i0$}; +\draw[->] (i0j0) -- (i1j0) node [midway, below] {$\lambda i. p\ i0$}; +\draw[->] (i0j1) -- (i1j1) node [midway] {$p$}; +\draw[->] (i1j0) -- (i1j1) node [midway, right] {$p$}; \ No newline at end of file diff --git a/diagrams/ctt/lor_connection.tex b/diagrams/ctt/lor_connection.tex new file mode 100644 index 0000000..a3ba993 --- /dev/null +++ b/diagrams/ctt/lor_connection.tex @@ -0,0 +1,11 @@ +\node[] (i0j0) at (-1, -1) {a}; +\node[] (i1j0) at (1, -1) {b}; +\node[] (i0j1) at (-1, 1) {b}; +\node[] (i1j1) at (1, 1) {b}; + +\node (in) at (0, 0) {$\lambda i j. p (i \lor j)$}; + +\draw[->] (i0j0) -- (i0j1) node [midway] {$p$}; +\draw[->] (i0j0) -- (i1j0) node [midway, below] {$p$}; +\draw[->] (i0j1) -- (i1j1) node [midway] {$\lambda i. p\ i1$}; +\draw[->] (i1j0) -- (i1j1) node [midway, right] {$\lambda j. p\ i1$}; \ No newline at end of file diff --git a/diagrams/ctt/pi_vs_pnoti_1.tex b/diagrams/ctt/pi_vs_pnoti_1.tex new file mode 100644 index 0000000..fe818ec --- /dev/null +++ b/diagrams/ctt/pi_vs_pnoti_1.tex @@ -0,0 +1,4 @@ +\node[draw,circle,label=left:{$a$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i0) at (-1, 0) {}; +\node[draw,circle,label=right:{$b$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i1) at (1, 0) {}; + +\draw[->] (i0) -> (i1) node [midway] {$\lambda i. 
p(i)$}; \ No newline at end of file diff --git a/diagrams/ctt/pi_vs_pnoti_2.tex b/diagrams/ctt/pi_vs_pnoti_2.tex new file mode 100644 index 0000000..0cc89f9 --- /dev/null +++ b/diagrams/ctt/pi_vs_pnoti_2.tex @@ -0,0 +1,4 @@ +\node[draw,circle,label=left:{$b$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i0) at (-1, 0) {}; +\node[draw,circle,label=right:{$a$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i1) at (1, 0) {}; + +\draw[<-] (i0) -> (i1) node [midway] {$\lambda i. p(\neg i)$}; \ No newline at end of file diff --git a/diagrams/ctt/refl_tt.tex b/diagrams/ctt/refl_tt.tex new file mode 100644 index 0000000..2b22b91 --- /dev/null +++ b/diagrams/ctt/refl_tt.tex @@ -0,0 +1,4 @@ +\node[draw,circle,label=left:{$a$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i0) at (-1, 0) {}; +\node[draw,circle,label=right:{$a$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i1) at (1, 0) {}; + +\draw[->] (i0) -> (i1) node [midway] {$\lambda i. a$}; \ No newline at end of file diff --git a/diagrams/ctt/span.tex b/diagrams/ctt/span.tex new file mode 100644 index 0000000..1ce3e21 --- /dev/null +++ b/diagrams/ctt/span.tex @@ -0,0 +1,6 @@ +\node[] (i0j1) at (-1, 1) {A}; +\node[] (i1j1) at (1, 1) {C}; +\node[] (i0j0) at (-1, -1) {B}; + +\draw[<-] (i0j0) -- (i0j1) node [midway] {$f$}; +\draw[->] (i0j1) -- (i1j1) node [midway] {$g$}; \ No newline at end of file diff --git a/diagrams/ctt/span_colimit.tex b/diagrams/ctt/span_colimit.tex new file mode 100644 index 0000000..3484b83 --- /dev/null +++ b/diagrams/ctt/span_colimit.tex @@ -0,0 +1,9 @@ +\node[] (i1j0) at (1, -1) {P}; +\node[] (i1j1) at (1, 1) {C}; +\node[] (i0j0) at (-1, -1) {B}; +\node[] (i0j1) at (-1, 1) {A}; + +\draw[<-] (i0j0) -- (i0j1) node [midway] {$f$}; +\draw[->] (i0j0) -- (i1j0) node [midway, below] {$i_1$}; +\draw[->] (i0j1) -- (i1j1) node [midway] {$g$}; +\draw[<-] (i1j0) -- (i1j1) node [midway, right] {$i_2$}; \ No newline at end of file diff --git 
a/diagrams/ctt/transp.tex b/diagrams/ctt/transp.tex new file mode 100644 index 0000000..fd2eb34 --- /dev/null +++ b/diagrams/ctt/transp.tex @@ -0,0 +1 @@ +\node[draw,circle,label=below:{$a0 : A(i0)$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (a0) at (-1, 0) {}; \ No newline at end of file diff --git a/diagrams/ctt/univalence.tex b/diagrams/ctt/univalence.tex new file mode 100644 index 0000000..7398c6a --- /dev/null +++ b/diagrams/ctt/univalence.tex @@ -0,0 +1,9 @@ +\node[] (i0j1) at (-1, 1) {A}; +\node[] (i1j1) at (1, 1) {B}; +\node[] (i0j0) at (-1, -1) {B}; +\node[] (i1j0) at (1, -1) {B}; + +\draw[<-] (i0j0) -- (i0j1) node [midway] {$\mathrm{equiv}$} node [midway, above, rotate=-90] {$\sim$}; +\draw[->] (i0j0) -- (i1j0) node [midway, below] {$B$}; +\draw[->,dashed] (i0j1) -- (i1j1) node [midway] {}; +\draw[<-] (i1j0) -- (i1j1) node [midway, right] {$\mathrm{id}_B$} node [midway, above, rotate=90] {$\sim$}; \ No newline at end of file diff --git a/diagrams/cubicalsets/acube.tex b/diagrams/cubicalsets/acube.tex new file mode 100644 index 0000000..5eaef43 --- /dev/null +++ b/diagrams/cubicalsets/acube.tex @@ -0,0 +1,30 @@ +\node (a) at (-2.5, 2.5) {a}; +\node (b) at (2.5, 2.5) {b}; +\node (c) at (-2.5, -2.5) {c}; +\node (d) at (2.5, -2.5) {d}; + +\node (w) at (-1, 1) {w}; +\node (x) at (1, 1) {x}; +\node (y) at (-1, -1) {y}; +\node (z) at (1, -1) {z}; + +\draw[->] (a) -- node[midway] {f} (b); +\draw[->] (b) -- node[midway,right] {q} (d); +\draw[->] (a) -- node[midway,left] {p} (c); +\draw[->] (c) -- node[midway,below] {g} (d); + +\draw[->] (w) -- node[midway,below] {h} (x); +\draw[->] (x) -- node[midway,left] {j} (z); +\draw[->] (y) -- node[midway,above] {k} (z); +\draw[->] (w) -- node[midway,right] {l} (y); + +\draw[->] (a) -- node[midway] {$\upsilon$} (w); +\draw[->] (b) -- node[midway] {$\phi$} (x); +\draw[->] (c) -- node[midway] {$\chi$} (y); +\draw[->] (d) -- node[midway] {$\psi$} (z); + +\node (wxyz) at (0, 0) {$\kappa$}; +\node (awyc) at (-1.8, 
0) {$\lambda$}; +\node (awxb) at (0, 1.8) {$\mu$}; +\node (bxzd) at (1.8, 0) {$\nu$}; +\node (cyzd) at (0, -1.8) {$\xi$}; \ No newline at end of file diff --git a/diagrams/cubicalsets/aglobe.tex b/diagrams/cubicalsets/aglobe.tex new file mode 100644 index 0000000..efb0837 --- /dev/null +++ b/diagrams/cubicalsets/aglobe.tex @@ -0,0 +1,6 @@ +\node (a) at (-1, 0) {a}; +\node (b) at (1, 0) {d}; + +\draw[->] (a) to[out=45,in=135] node[midway] (f) {$q \circ f$} (b); +\draw[->] (a) to[out=-45,in=-135] node[midway,below] (g) {$g \circ p$} (b); +\draw[double,->] ([yshift=-2pt]f.south) -- node[midway,right] {$\sigma$} ([yshift=2pt]g.north); \ No newline at end of file diff --git a/diagrams/cubicalsets/asquare.tex b/diagrams/cubicalsets/asquare.tex new file mode 100644 index 0000000..1f0926e --- /dev/null +++ b/diagrams/cubicalsets/asquare.tex @@ -0,0 +1,10 @@ +\node (fi0j1) at (-0.75,0.75) {$a$}; +\node (fi0j0) at (-0.75,-0.75) {$b$}; +\node (fi1j1) at (0.75,0.75) {$c$}; +\node (fi1j0) at (0.75,-0.75) {$d$}; +\node (f) at (0, 0) {$\sigma$}; + +\draw[->] (fi0j1) -- (fi1j1) node[midway] (f) {f}; +\draw[->] (fi0j0) -- (fi1j0) node[midway,below] (g) {g}; +\draw[<-] (fi0j0) -- (fi0j1) node[midway,left] (p) {p}; +\draw[<-] (fi1j0) -- (fi1j1) node[midway,right] (q) {q}; \ No newline at end of file diff --git a/diagrams/cubicalsets/cubical_2cell.tex b/diagrams/cubicalsets/cubical_2cell.tex new file mode 100644 index 0000000..4d96ff8 --- /dev/null +++ b/diagrams/cubicalsets/cubical_2cell.tex @@ -0,0 +1,11 @@ +\node (atop) at (-1, 0.5) {a}; +\node (abot) at (-1, -0.5) {a}; +\node (btop) at (1, 0.5) {b}; +\node (bbot) at (1, -0.5) {b}; + +\draw[->] (atop) to[out=30,in=150] node[midway] (f) {f} (btop); +\draw[->] (atop) -- (abot); +\draw[->] (abot) to[out=-30,in=-150] node[midway,below] (g) {g} (bbot); +\draw[->] (btop) -- (bbot); + +\node at (0, 0) {$\alpha$}; \ No newline at end of file diff --git a/diagrams/cubicalsets/degeneracies.tex b/diagrams/cubicalsets/degeneracies.tex new file 
mode 100644 index 0000000..a843c4c --- /dev/null +++ b/diagrams/cubicalsets/degeneracies.tex @@ -0,0 +1,24 @@ +\node (a) at (-1, 0) {$a$}; + +\node (a0) at (0, 0.75) {$a$}; +\node (a1) at (0, -0.75) {$a$}; +\draw[->] (a0) -- node[midway] (al) {} (a1); +\draw[dashed,->] (a) to[] node[midway,above] {$\sigma$} ([xshift=-0.5em]al); + +\node (fi0) at (1, 0.75) {$a$}; +\node (fi1) at (1, -0.75) {$b$}; +\draw[->] (fi0) -- node[midway,right] (f) {f} (fi1); + +\node (fi0j1) at (4 + -0.75, 0.75) {$a$}; +\node (fi0j0) at (4 + -0.75, -0.75) {$a$}; +\node (fi1j1) at (4 + 0.75, 0.75) {$b$}; +\node (fi1j0) at (4 + 0.75, -0.75) {$b$}; + +\draw[->] (fi0j1) -- (fi1j1) node[midway] (fs) {f}; +\draw[->] (fi0j0) -- (fi1j0) node[midway,below] (gs) {f}; +\draw[<-] (fi0j0) -- (fi0j1) node[midway,left] (p) {}; +\draw[<-] (fi1j0) -- (fi1j1) node[midway,right] (q) {}; + +\node (sq) at (4, 0) {$\bullet$}; + +\draw[dashed,->] (f) to[out=20,in=160] node[midway,below] {$\sigma_0 \circ \sigma$} (sq); \ No newline at end of file diff --git a/diagrams/cubicalsets/del_asquare.tex b/diagrams/cubicalsets/del_asquare.tex new file mode 100644 index 0000000..a2401e1 --- /dev/null +++ b/diagrams/cubicalsets/del_asquare.tex @@ -0,0 +1,9 @@ +\node (fi0j1) at (-0.75,0.75) {$a$}; +\node (fi0j0) at (-0.75,-0.75) {$b$}; +\node (fi1j1) at (0.75,0.75) {$c$}; +\node (fi1j0) at (0.75,-0.75) {$d$}; + +\draw[->] (fi0j1) -- (fi1j1) node[midway] (f) {f}; +\draw[->] (fi0j0) -- (fi1j0) node[midway,below] (g) {g}; +\draw[<-] (fi0j0) -- (fi0j1) node[midway,left] (p) {p}; +\draw[<-] (fi1j0) -- (fi1j1) node[midway,right] (q) {q}; \ No newline at end of file diff --git a/diagrams/cubicalsets/delta10_delta0.tex b/diagrams/cubicalsets/delta10_delta0.tex new file mode 100644 index 0000000..1eda218 --- /dev/null +++ b/diagrams/cubicalsets/delta10_delta0.tex @@ -0,0 +1,17 @@ +\node (point) at (0, 0) {$\color{red}{\bullet}$}; + +\node (line0) at (2, 1) {$\color{blue}{\bullet}_0$}; +\node (line1) at (2, -1) 
{$\color{red}{\bullet}_1$}; + +\draw (line0) -- (line1) node[midway] (linemid) {}; +; +\draw[->] (point) to[out=-70,in=180] node[midway] (a) {$\delta^1$} (line1); + +\node (sq00) at (4, -1) {$\color{red}\bullet_{01}$}; +\node (sq01) at (4, 1) {$\color{blue}{\bullet}_{00}$}; +\node (sq10) at (6, -1) {$\bullet_{11}$}; +\node (sq11) at (6, 1) {$\bullet_{10}$}; + +\draw (sq00) -- node[midway] (linemid_img) {} (sq01) -- (sq11) -- (sq10) -- (sq00); + +\draw[->] (linemid) -- node[midway] (b) {$\delta^0_0$} (linemid_img); \ No newline at end of file diff --git a/diagrams/cubicalsets/facemap.tex b/diagrams/cubicalsets/facemap.tex new file mode 100644 index 0000000..ed82cfb --- /dev/null +++ b/diagrams/cubicalsets/facemap.tex @@ -0,0 +1,43 @@ +\node (j1) at (0,1) {$\color{red}\bullet$}; +\node (j0) at (0,0) {$\color{red}\bullet$}; + +\node (mid) at (0,0.5) {}; + +\draw[color=red] (j1) -- (j0); + +\node (i0j1) at (1,1) {$\color{red}\bullet$}; +\node (i0j0) at (1,0) {$\color{red}\bullet$}; +\node (i1j1) at (2,1) {$\bullet$}; +\node (i1j0) at (2,0) {$\bullet$}; + +\draw[color=red] (i0j1) -- (i0j0); +\draw (i0j1) -- (i1j1) -- (i1j0) -- (i0j0); + +\node (mid2) at (1,0.5) {}; + +\draw[->] (mid) -> (mid2); + +\node (F) at (-0.6, 0.5) {$\mathcal{F}$}; +\node[fit=(j0)(i1j1), left delimiter=(, inner sep=-0.7ex, right delimiter=)] (openF) {}; + +\node (colon) at (2.6, 0.5) {$:$}; + +\node (F2) at (2.9, 0.5) {$\mathcal{F}$}; + +\node (fi0j1) at (3.5,1) {$\color{red}\bullet$}; +\node (fi0j0) at (3.5,0) {$\color{red}\bullet$}; +\node (fi1j1) at (4.5,1) {$\bullet$}; +\node (fi1j0) at (4.5,0) {$\bullet$}; + +\draw[color=red] (fi0j1) -- (fi0j0); +\draw (fi0j1) -- (fi1j1) -- (fi1j0) -- (fi0j0); +\node[fit=(fi0j0)(fi1j1), left delimiter=(, inner sep=-0.7ex, right delimiter=)] (openF2) {}; + +\node (F3) at (6.1, 0.5) {$\mathcal{F}$}; + +\draw[->] ([xshift=2.1ex]openF2.east) -- (F3); + +\node (fj1) at (6.7,1) {$\color{red}\bullet$}; +\node (fj0) at (6.7,0) {$\color{red}\bullet$}; 
+\node[fit=(fj1)(fj0), left delimiter=(, inner sep=-0.7ex, right delimiter=)] (openF3) {}; +\draw[color=red] (fj1) -- (fj0); \ No newline at end of file diff --git a/diagrams/cubicalsets/first_ncubes.tex b/diagrams/cubicalsets/first_ncubes.tex new file mode 100644 index 0000000..7d4bfe3 --- /dev/null +++ b/diagrams/cubicalsets/first_ncubes.tex @@ -0,0 +1,38 @@ +\node at (-5, 0) {$\bullet_{()}$}; + +\node (line0) at (-3, 1) {$\bullet_0$}; +\node (line1) at (-3, -1) {$\bullet_1$}; +\draw[->] (line0) -> (line1); + +\node (sq00) at (-1, 1) {$\bullet_{00}$}; +\node (sq01) at (-1, -1) {$\bullet_{01}$}; +\node (sq10) at (1, 1) {$\bullet_{10}$}; +\node (sq11) at (1, -1) {$\bullet_{11}$}; + +\draw[->] (sq00) -> (sq01); +\draw[->] (sq00) -> (sq10); +\draw[->] (sq10) -> (sq11); +\draw[->] (sq01) -> (sq11); + +\node (sq010) at (3, -1) {$\bullet_{000}$}; +\node (sq011) at (4, 0) {$\bullet_{001}$}; +\node (sq110) at (5, -1) {$\bullet_{100}$}; +\node (sq111) at (6, 0) {$\bullet_{101}$}; + +\node (sq000) at (3, 1) {$\bullet_{010}$}; +\node (sq001) at (4, 2) {$\bullet_{011}$}; +\node (sq100) at (5, 1) {$\bullet_{110}$}; +\node (sq101) at (6, 2) {$\bullet_{111}$}; + +\draw[->] (sq000) -- (sq001); +\draw[->] (sq000) -- (sq100); +\draw[->] (sq000) -- (sq010); +\draw[->] (sq001) -- (sq011); +\draw[->] (sq001) -- (sq101); +\draw[->] (sq010) -- (sq110); +\draw[->] (sq010) -- (sq011); +\draw[->] (sq100) -- (sq101); +\draw[->] (sq100) -- (sq110); +\draw[->] (sq101) -- (sq111); +\draw[->] (sq110) -- (sq111); +\draw[->] (sq011) -- (sq111); \ No newline at end of file diff --git a/diagrams/cubicalsets/globular_2cell.tex b/diagrams/cubicalsets/globular_2cell.tex new file mode 100644 index 0000000..a000633 --- /dev/null +++ b/diagrams/cubicalsets/globular_2cell.tex @@ -0,0 +1,6 @@ +\node (a) at (-1, 0) {a}; +\node (b) at (1, 0) {b}; + +\draw[->] (a) to[out=30,in=150] node[midway] (f) {f} (b); +\draw[->] (a) to[out=-30,in=-150] node[midway,below] (g) {g} (b); +\draw[double,->] 
([yshift=-2pt]f.south) -- node[midway,right] {$\alpha$} ([yshift=2pt]g.north); \ No newline at end of file diff --git a/diagrams/cubicalsets/kan_condition.tex b/diagrams/cubicalsets/kan_condition.tex new file mode 100644 index 0000000..1cfdf43 --- /dev/null +++ b/diagrams/cubicalsets/kan_condition.tex @@ -0,0 +1,7 @@ +\node (open) at (0, 2) {$\sqcap^{n,i,\varepsilon}$}; +\node (box) at (0, 0) {$\square^n$}; +\node (set) at (2, 0) {$X$}; + +\draw[right hook->] (open) -- (box); +\draw[->] (open) -- node[midway] {f} (set); +\draw[dotted, ->] (box) -- node[midway, below] {g} (set); \ No newline at end of file diff --git a/diagrams/cubicalsets/left_inv.tex b/diagrams/cubicalsets/left_inv.tex new file mode 100644 index 0000000..7ac9c1a --- /dev/null +++ b/diagrams/cubicalsets/left_inv.tex @@ -0,0 +1,9 @@ +\node (sq1_b00) at (-3, 1) {B}; +\node (sq1_a10) at (-1, 1) {A}; +\node (sq1_b01) at (-3, -1) {B}; +\node (sq1_b11) at (-1, -1) {B}; + +\draw[dashed,->] (sq1_b00) -- node[midway] {g} (sq1_a10); +\draw[->] (sq1_a10) -> node[midway] {f} (sq1_b11); +\draw[->] (sq1_b00) -> node[midway,left] {1} (sq1_b01); +\draw[->] (sq1_b01) -> node[midway,below] {1} (sq1_b11); \ No newline at end of file diff --git a/diagrams/cubicalsets/naturality.tex b/diagrams/cubicalsets/naturality.tex new file mode 100644 index 0000000..9fb347e --- /dev/null +++ b/diagrams/cubicalsets/naturality.tex @@ -0,0 +1,9 @@ +\node (sq1_b00) at (-3, 1) {$F(c\prime)$}; +\node (sq1_a10) at (-1, 1) {$F(c)$}; +\node (sq1_b01) at (-3, -1) {$G(c\prime)$}; +\node (sq1_b11) at (-1, -1) {$G(c)$}; + +\draw[->] (sq1_b00) -- node[midway] {$F(f)$} (sq1_a10); +\draw[->] (sq1_a10) -> node[midway] {$\alpha_{c}$} (sq1_b11); +\draw[->] (sq1_b00) -> node[midway,left] {$\alpha_{c\prime}$} (sq1_b01); +\draw[->] (sq1_b01) -> node[midway,below] {$G(f)$} (sq1_b11); \ No newline at end of file diff --git a/diagrams/cubicalsets/open_box.tex b/diagrams/cubicalsets/open_box.tex new file mode 100644 index 0000000..7c7701f --- /dev/null 
+++ b/diagrams/cubicalsets/open_box.tex @@ -0,0 +1,9 @@ +\node (fi0j1) at (-0.75, 0.75) {$a$}; +\node (fi0j0) at (-0.75, -0.75) {$b$}; +\node (fi1j1) at (0.75, 0.75) {$c$}; +\node (fi1j0) at (0.75, -0.75) {$d$}; + +\draw[->,dotted] (fi0j1) -- (fi1j1) node[midway] (f) {f}; +\draw[->] (fi0j0) -- (fi1j0) node[midway,below] (g) {g}; +\draw[<-] (fi0j0) -- (fi0j1) node[midway,left] (p) {p}; +\draw[<-] (fi1j0) -- (fi1j1) node[midway,right] (q) {q}; \ No newline at end of file diff --git a/diagrams/cubicalsets/open_boxes.tex b/diagrams/cubicalsets/open_boxes.tex new file mode 100644 index 0000000..efb857d --- /dev/null +++ b/diagrams/cubicalsets/open_boxes.tex @@ -0,0 +1,39 @@ +\node (fi0j1) at (-0.75, 0.75) {$a$}; +\node (fi0j0) at (-0.75, -0.75) {$b$}; +\node (fi1j1) at (0.75, 0.75) {$c$}; +\node (fi1j0) at (0.75, -0.75) {$d$}; + +\draw[->,dotted] (fi0j1) -- (fi1j1) node[midway] (f) {f}; +\draw[->] (fi0j0) -- (fi1j0) node[midway,below] (g) {g}; +\draw[<-] (fi0j0) -- (fi0j1) node[midway,left] (p) {p}; +\draw[<-] (fi1j0) -- (fi1j1) node[midway,right] (q) {q}; + +\node (fi0j1) at (2.5 + -0.75, 0.75) {$a$}; +\node (fi0j0) at (2.5 + -0.75, -0.75) {$b$}; +\node (fi1j1) at (2.5 + 0.75, 0.75) {$c$}; +\node (fi1j0) at (2.5 + 0.75, -0.75) {$d$}; + +\draw[->] (fi0j1) -- (fi1j1) node[midway] (f) {f}; +\draw[->,dotted] (fi0j0) -- (fi1j0) node[midway,below] (g) {g}; +\draw[<-] (fi0j0) -- (fi0j1) node[midway,left] (p) {p}; +\draw[<-] (fi1j0) -- (fi1j1) node[midway,right] (q) {q}; + +\node (fi0j1) at (5 + -0.75, 0.75) {$a$}; +\node (fi0j0) at (5 + -0.75, -0.75) {$b$}; +\node (fi1j1) at (5 + 0.75, 0.75) {$c$}; +\node (fi1j0) at (5 + 0.75, -0.75) {$d$}; + +\draw[->] (fi0j1) -- (fi1j1) node[midway] (f) {f}; +\draw[->] (fi0j0) -- (fi1j0) node[midway,below] (g) {g}; +\draw[<-,dotted] (fi0j0) -- (fi0j1) node[midway,left] (p) {p}; +\draw[<-] (fi1j0) -- (fi1j1) node[midway,right] (q) {q}; + +\node (fi0j1) at (7.5 + -0.75, 0.75) {$a$}; +\node (fi0j0) at (7.5 + -0.75, -0.75) {$b$}; +\node (fi1j1) 
at (7.5 + 0.75, 0.75) {$c$}; +\node (fi1j0) at (7.5 + 0.75, -0.75) {$d$}; + +\draw[->] (fi0j1) -- (fi1j1) node[midway] (f) {f}; +\draw[->] (fi0j0) -- (fi1j0) node[midway,below] (g) {g}; +\draw[<-] (fi0j0) -- (fi0j1) node[midway,left] (p) {p}; +\draw[<-,dotted] (fi1j0) -- (fi1j1) node[midway,right] (q) {q}; \ No newline at end of file diff --git a/diagrams/cubicalsets/right_inv.tex b/diagrams/cubicalsets/right_inv.tex new file mode 100644 index 0000000..550cc16 --- /dev/null +++ b/diagrams/cubicalsets/right_inv.tex @@ -0,0 +1,9 @@ +\node (sq1_a00) at (-3, 1) {A}; +\node (sq1_b10) at (-1, 1) {B}; +\node (sq1_a01) at (-3, -1) {A}; +\node (sq1_a11) at (-1, -1) {A}; + +\draw[dashed,->] (sq1_b10) -- node[midway] {h} (sq1_a11); +\draw[->] (sq1_a00) -- node[midway] {f} (sq1_b10); +\draw[->] (sq1_a00) -- node[midway,left] {1} (sq1_a01); +\draw[->] (sq1_a01) -- node[midway,below] {1} (sq1_a11); \ No newline at end of file diff --git a/diagrams/cubicalsets/thin_squares.tex b/diagrams/cubicalsets/thin_squares.tex new file mode 100644 index 0000000..a218e0a --- /dev/null +++ b/diagrams/cubicalsets/thin_squares.tex @@ -0,0 +1,19 @@ +\node (fi0j1) at (-0.75, 0.75) {$a$}; +\node (fi0j0) at (-0.75, -0.75) {$b$}; +\node (fi1j1) at (0.75, 0.75) {$a$}; +\node (fi1j0) at (0.75, -0.75) {$b$}; + +\draw[->] (fi0j1) -- (fi1j1) node[midway] (f) {1}; +\draw[->] (fi0j0) -- (fi1j0) node[midway,below] (g) {1}; +\draw[<-] (fi0j0) -- (fi0j1) node[midway,left] (p) {f}; +\draw[<-] (fi1j0) -- (fi1j1) node[midway,right] (q) {f}; + +\node (fi0j1) at (2.5 + -0.75, 0.75) {$a$}; +\node (fi0j0) at (2.5 + -0.75, -0.75) {$a$}; +\node (fi1j1) at (2.5 + 0.75, 0.75) {$b$}; +\node (fi1j0) at (2.5 + 0.75, -0.75) {$b$}; + +\draw[->] (fi0j1) -- (fi1j1) node[midway] (f) {f}; +\draw[->] (fi0j0) -- (fi1j0) node[midway,below] (g) {f}; +\draw[<-] (fi0j0) -- (fi0j1) node[midway,left] (p) {1}; +\draw[<-] (fi1j0) -- (fi1j1) node[midway,right] (q) {1}; \ No newline at end of file diff --git a/diagrams/eq/0cube.tex 
b/diagrams/eq/0cube.tex new file mode 100644 index 0000000..5d698e2 --- /dev/null +++ b/diagrams/eq/0cube.tex @@ -0,0 +1 @@ +\node[draw,circle,label=right:$x:A$,fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i0) at (0, 0) {}; \ No newline at end of file diff --git a/diagrams/eq/1cube.tex b/diagrams/eq/1cube.tex new file mode 100644 index 0000000..c5e8dd3 --- /dev/null +++ b/diagrams/eq/1cube.tex @@ -0,0 +1,4 @@ +\node[draw,circle,label=left:{$A[0/i]$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i0) at (-1, 0) {}; +\node[draw,circle,label=right:{$A[1/i]$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i1) at (1, 0) {}; + +\draw (i0) -- (i1); \ No newline at end of file diff --git a/diagrams/eq/2cube.tex b/diagrams/eq/2cube.tex new file mode 100644 index 0000000..a28d726 --- /dev/null +++ b/diagrams/eq/2cube.tex @@ -0,0 +1,6 @@ +\node[draw,circle,label=left:{$A[0/i, 0/j]$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i0j0) at (-1, -1) {}; +\node[draw,circle,label=right:{$A[1/i, 0/j]$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i1j0) at (1, -1) {}; +\node[draw,circle,label=left:{$A[0/i, 1/j]$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i0j1) at (-1, 1) {}; +\node[draw,circle,label=right:{$A[1/i, 1/j]$},fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i1j1) at (1, 1) {}; + +\draw (i0j0) -- (i1j0) -- (i1j1) -- (i0j1) -- (i0j0); \ No newline at end of file diff --git a/diagrams/eq/interval.tex b/diagrams/eq/interval.tex new file mode 100644 index 0000000..2f4eaa2 --- /dev/null +++ b/diagrams/eq/interval.tex @@ -0,0 +1,5 @@ +\node[draw,circle,label=below:$i_0$,fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i0) at (-1, 0) {}; +\node[draw,circle,label=below:$i_1$,fill,outer sep=0.1cm, inner sep=0pt, minimum size=0.1cm] (i1) at (1, 0) {}; + +\draw (i0) -- (i1) node [midway, above] (seg) {seg}; +% \draw[-] (i0) -- (i1); \ No newline at end of file diff --git 
a/diagrams/gm/app_gx.tex b/diagrams/gm/app_gx.tex new file mode 100644 index 0000000..56dd630 --- /dev/null +++ b/diagrams/gm/app_gx.tex @@ -0,0 +1,39 @@ +\begin{scope}[node distance=0.75cm] + +\node (FGX) [inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; +\node (FG) [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, left of=FGX] {@}; +\node (X) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, right of=FGX] {x}; +\node (F) [xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=FG, left of=FG, xshift=2] {f}; +\node (G) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FG, right of=FG, xshift=-2] {g}; + +\node (GX) + [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=F, xshift=0.75cm] + {@}; + +\draw[->] (FGX) to (X); +\draw[->] (FGX) to (FG); +\draw[->] (FG) to (F.north east); +\draw[->] (FG) to (G.north west); + +\draw[->] (GX) to ([shift=({-0.35cm,-0.35cm})]GX) + -- ++(0, -0.10cm) + -| (G); + +\draw[->] (GX) to ([shift=({0.45cm,-0.35cm})]GX) + -| (X); + +\node (Stk0) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, 0) {}; +\node (Stk1) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -0.75) {}; +\node (Stk2) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -1.5) {}; +\node (Stk3) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -2.25) {}; + +\draw[->] (Stk0.center) to (FGX); +\draw[->] (Stk1.center) to (FG); +\draw[->] (Stk2.center) to (F); +\draw[->] (Stk3.center) to (GX); + +\end{scope} diff --git a/diagrams/gm/app_kgx.tex b/diagrams/gm/app_kgx.tex new file mode 100644 index 0000000..36fceaa --- /dev/null +++ b/diagrams/gm/app_kgx.tex @@ -0,0 +1,51 @@ +\begin{scope}[node distance=0.75cm] + +\node (FGX) [inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; +\node (FG) [xshift=0.25cm, inner xsep=0.01cm, 
inner ysep=0.03cm, below of=FGX, left of=FGX] {@}; +\node (X) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, right of=FGX] {x}; +\node (F) [xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=FG, left of=FG, xshift=2] {f}; +\node (G) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FG, right of=FG, xshift=-2] {g}; + +\node (KGX) + [xshift=-0.55cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=F, xshift=0.75cm] + {@}; + +\node (K) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=KGX, left of=KGX] + {K}; + +\node (GX) + [xshift=-0.45cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=KGX, right of=KGX] + {@}; + +\draw[->] (FGX) to (X); +\draw[->] (FGX) to (FG); +\draw[->] (FG) to (F.north east); +\draw[->] (FG) to (G.north west); + +\draw[->] (KGX) to (K); +\draw[->] (KGX) to (GX); + +\draw[->] (GX) to ([shift=({-0.35cm,-0.35cm})]GX) + -- ++(0, -0.10cm) + -| (G); + +\draw[->] (GX) to ([shift=({0.45cm,-0.35cm})]GX) + -| (X); + +\node (Stk0) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, 0) {}; +\node (Stk1) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -0.75) {}; +\node (Stk2) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -1.5) {}; +\node (Stk3) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -2.25) {}; + + +\draw[->] (Stk0.center) to (FGX); +\draw[->] (Stk1.center) to (FG); +\draw[->] (Stk2.center) to (F); +\draw[->] (Stk3.center) to (KGX); + +\end{scope} diff --git a/diagrams/gm/entry.tex b/diagrams/gm/entry.tex new file mode 100644 index 0000000..a8becd8 --- /dev/null +++ b/diagrams/gm/entry.tex @@ -0,0 +1,25 @@ +\begin{scope}[node distance=0.75cm] + +\node (FGX) [inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; +\node (FG) [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, left of=FGX] 
{@}; +\node (X) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, right of=FGX] {x}; +\node (F) [xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=FG, left of=FG, xshift=2] {f}; +\node (G) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FG, right of=FG, xshift=-2] {g}; + +\draw[->] (FGX) to (X); +\draw[->] (FGX) to (FG); +\draw[->] (FG) to (F.north east); +\draw[->] (FG) to (G.north west); + +\node (Stk0) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, 0) {}; +\node (Stk1) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -0.75) {}; +\node (Stk2) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -1.5) {}; + +\draw[->] (Stk0.center) to (FGX); +\draw[->] (Stk1.center) to (FG); +\draw[->] (Stk2.center) to (F); + +\end{scope} diff --git a/diagrams/gm/push_g.tex b/diagrams/gm/push_g.tex new file mode 100644 index 0000000..3f2b08f --- /dev/null +++ b/diagrams/gm/push_g.tex @@ -0,0 +1,31 @@ +\begin{scope}[node distance=0.75cm] + +\node (FGX) [inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; +\node (FG) [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, left of=FGX] {@}; +\node (X) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, right of=FGX] {x}; +\node (F) [xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=FG, left of=FG, xshift=2] {f}; +\node (G) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FG, right of=FG, xshift=-2] {g}; + +\draw[->] (FGX) to (X); +\draw[->] (FGX) to (FG); +\draw[->] (FG) to (F.north east); +\draw[->] (FG) to (G.north west); + +\node (Stk0) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, 0) {}; +\node (Stk1) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -0.75) {}; +\node (Stk2) [draw, 
shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -1.5) {}; +\node (Stk3) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -2.25) {}; +\node (Stk4) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -3) {}; + +\draw[->] (Stk0.center) to (FGX); +\draw[->] (Stk1.center) to (FG); +\draw[->] (Stk2.center) to (F); +\draw[->] (Stk3.center) to (X |- 0, -2.25cm) -- (X); +\draw[->] (Stk4.center) to (G |- 0, -3cm) -- (G); + +\end{scope} diff --git a/diagrams/gm/push_k.tex b/diagrams/gm/push_k.tex new file mode 100644 index 0000000..720c418 --- /dev/null +++ b/diagrams/gm/push_k.tex @@ -0,0 +1,44 @@ +\begin{scope}[node distance=0.75cm] + +\node (FGX) [inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; +\node (FG) [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, left of=FGX] {@}; +\node (X) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, right of=FGX] {x}; +\node (F) [xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=FG, left of=FG, xshift=2] {f}; +\node (G) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FG, right of=FG, xshift=-2] {g}; + +\node (GX) + [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=F, xshift=0.75cm] + {@}; + +\draw[->] (FGX) to (X); +\draw[->] (FGX) to (FG); +\draw[->] (FG) to (F.north east); +\draw[->] (FG) to (G.north west); + +\draw[->] (GX) to ([shift=({-0.35cm,-0.35cm})]GX) + -- ++(0, -0.10cm) + -| (G); + +\draw[->] (GX) to ([shift=({0.45cm,-0.35cm})]GX) + -| (X); + +\node (Stk0) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, 0) {}; +\node (Stk1) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -0.75) {}; +\node (Stk2) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -1.5) {}; +\node (Stk3) [draw, 
shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -2.25) {}; +\node (Stk4) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -3) {}; + +\node (K) [right of=Stk4, xshift=1.5cm] {K}; + +\draw[->] (Stk0.center) to (FGX); +\draw[->] (Stk1.center) to (FG); +\draw[->] (Stk2.center) to (F); +\draw[->] (Stk3.center) to (GX); +\draw[->] (Stk4.center) to (K); + +\end{scope} diff --git a/diagrams/gm/push_x.tex b/diagrams/gm/push_x.tex new file mode 100644 index 0000000..bd416c6 --- /dev/null +++ b/diagrams/gm/push_x.tex @@ -0,0 +1,33 @@ +\begin{scope}[node distance=0.75cm] + +\node (FGX) [inner xsep=0.01cm, inner ysep=0.03cm] + at (0, 0) {@}; +\node (FG) [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, left of=FGX] + {@}; +\node (X) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, right of=FGX] + {x}; +\node (F) [xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=FG, left of=FG, xshift=2] + {f}; +\node (G) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FG, right of=FG, xshift=-2] + {g}; + +\draw[->] (FGX) to (X); +\draw[->] (FGX) to (FG); +\draw[->] (FG) to (F.north east); +\draw[->] (FG) to (G.north west); + +\node (Stk0) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, 0) {}; +\node (Stk1) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -0.75) {}; +\node (Stk2) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -1.5) {}; +\node (Stk3) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -2.25) {}; + +\draw[->] (Stk0.center) to (FGX); +\draw[->] (Stk1.center) to (FG); +\draw[->] (Stk2.center) to (F); +\draw[->] (Stk3.center) to (0.5cm, -2.25cm) -- (X); + +\end{scope} diff --git a/diagrams/gm/slide_3.tex b/diagrams/gm/slide_3.tex new file mode 
100644 index 0000000..c386ac3 --- /dev/null +++ b/diagrams/gm/slide_3.tex @@ -0,0 +1,34 @@ +\begin{scope}[node distance=0.75cm] + +\node (KGX) + [xshift=-0.55cm, inner xsep=0.01cm, inner ysep=0.03cm] + at (0, 0) {@}; + +\node (K) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=KGX, left of=KGX] + {K}; + +\node (GX) + [xshift=-0.45cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=KGX, right of=KGX] + {@}; + +\node (G) + [xshift=0.45cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=GX, left of=GX] + {g}; + +\node (X) + [xshift=-0.45cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=GX, right of=GX] + {x}; + +\draw[->] (KGX) to (K); +\draw[->] (KGX) to (GX); + +\draw[->] (GX) to (G); +\draw[->] (GX) to (X); + +\node (Stk0) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, 0) {}; + +\draw[->] (Stk0.center) to (KGX); + +\end{scope} diff --git a/diagrams/gm/spine+stack.tex b/diagrams/gm/spine+stack.tex new file mode 100644 index 0000000..8747541 --- /dev/null +++ b/diagrams/gm/spine+stack.tex @@ -0,0 +1,25 @@ +\begin{scope}[node distance=0.75cm] + +\node (FGX) [color=blue,inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; +\node (FG) [color=blue,xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, left of=FGX] {@}; +\node (X) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, right of=FGX] {y}; +\node (F) [color=blue,xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=FG, left of=FG, xshift=2] {f}; +\node (G) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FG, right of=FG, xshift=-2] {x}; + +\draw[->] (FGX) to (X); +\draw[->,color=blue] (FGX) to (FG); +\draw[->,color=blue] (FG) to (F.north east); +\draw[->] (FG) to (G.north west); + +\node (Stk0) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, 0) {}; +\node (Stk1) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at 
(-2.75, -0.75) {}; +\node (Stk2) [draw, shape=rectangle, minimum width=1.5cm, minimum height=0.75cm, anchor=center] + at (-2.75, -1.5) {}; + +\draw[->] (Stk0.center) to (FGX); +\draw[->] (Stk1.center) to (FG); +\draw[->] (Stk2.center) to (F); + +\end{scope} diff --git a/diagrams/gm/spine.tex b/diagrams/gm/spine.tex new file mode 100644 index 0000000..b290d32 --- /dev/null +++ b/diagrams/gm/spine.tex @@ -0,0 +1,14 @@ +\begin{scope}[node distance=0.75cm] + +\node (FGX) [color=blue,inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; +\node (FG) [color=blue,xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, left of=FGX] {@}; +\node (X) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FGX, right of=FGX] {y}; +\node (F) [color=blue,xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=FG, left of=FG, xshift=2] {f}; +\node (G) [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=FG, right of=FG, xshift=-2] {x}; + +\draw[->] (FGX) to (X); +\draw[->,color=blue] (FGX) to (FG); +\draw[->,color=blue] (FG) to (F.north east); +\draw[->] (FG) to (G.north west); + +\end{scope} diff --git a/diagrams/template/step1.tex b/diagrams/template/step1.tex new file mode 100644 index 0000000..3b1ce8d --- /dev/null +++ b/diagrams/template/step1.tex @@ -0,0 +1 @@ +\node at (0, 0) {main}; diff --git a/diagrams/template/step2.tex b/diagrams/template/step2.tex new file mode 100644 index 0000000..7389b10 --- /dev/null +++ b/diagrams/template/step2.tex @@ -0,0 +1,26 @@ +\begin{scope}[node distance=0.75cm] + +\node (DoDo4) [inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; + +\node (Do) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=DoDo4, left of=DoDo4] + {double}; + +\node (Do4) + [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=DoDo4, right of=DoDo4] + {@}; + +\node (Do_2) + [xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=Do4, left of=Do4, xshift=2] + {double}; + +\node (4) + [xshift=-0.25cm, 
inner xsep=0.01cm, inner ysep=0.03cm, below of=Do4, right of=Do4, xshift=-2] + {4}; + +\draw[->] (DoDo4) to (Do); +\draw[->] (DoDo4) to (Do4); +\draw[->] (Do4) to (Do_2); +\draw[->] (Do4) to (4); + +\end{scope} diff --git a/diagrams/template/step3.tex b/diagrams/template/step3.tex new file mode 100644 index 0000000..00c1612 --- /dev/null +++ b/diagrams/template/step3.tex @@ -0,0 +1,33 @@ +\begin{scope}[node distance=0.75cm] + +\node (DoDo4) [inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; + +\node (TimesAp) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=DoDo4, left of=DoDo4] + {@}; + +\node (Times) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=TimesAp, left of=TimesAp] + {$+$}; + +\node (Do4) + [xshift=-0.5cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=DoDo4, + right of=DoDo4, yshift=-0.5cm] + {@}; + +\node (Do_2) + [xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=Do4, left of=Do4, xshift=2] + {double}; + +\node (4) + [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=Do4, right of=Do4, xshift=-2] + {4}; + +\draw[->] (DoDo4) to (TimesAp); +\draw[->] (TimesAp) to (Times); +\draw[->] (TimesAp) |- (Do4); +\draw[->] (DoDo4) to (Do4); +\draw[->] (Do4) to (Do_2); +\draw[->] (Do4) to (4); + +\end{scope} diff --git a/diagrams/template/step4.tex b/diagrams/template/step4.tex new file mode 100644 index 0000000..c89187f --- /dev/null +++ b/diagrams/template/step4.tex @@ -0,0 +1,38 @@ +\begin{scope}[node distance=0.75cm] + +\node (DoDo4) [inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; + +\node (TimesAp) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=DoDo4, left of=DoDo4] + {@}; + +\node (Times) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=TimesAp, left of=TimesAp] + {$+$}; + +\node (Times44) + [xshift=-0.5cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=DoDo4, right of=DoDo4, yshift=-0.5cm] + {@}; + +\node (4) + [xshift=-0.25cm, inner xsep=0.01cm, inner 
ysep=0.03cm, below of=Times44, right of=Times44, yshift=-0.75cm] + {4}; + +\node (Times4) + [xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=Times44, left of=Times44, xshift=2] + {@}; + +\node (Times2) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=Times4, left of=Times4] + {$+$}; + +\draw[->] (DoDo4) to (TimesAp); +\draw[->] (TimesAp) to (Times); +\draw[->] (TimesAp) to (Times44); +\draw[->] (DoDo4) to (Times44); +\draw[->] (Times44) to (Times4); +\draw[->] (Times4) to (Times2); +\draw[->] (Times4) |- (4); +\draw[->] (Times44) to (4); + +\end{scope} diff --git a/diagrams/template/step4red.tex b/diagrams/template/step4red.tex new file mode 100644 index 0000000..b6302cb --- /dev/null +++ b/diagrams/template/step4red.tex @@ -0,0 +1,38 @@ +\begin{scope}[node distance=0.75cm] + +\node (DoDo4) [inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; + +\node (TimesAp) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=DoDo4, left of=DoDo4] + {@}; + +\node (Times) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=TimesAp, left of=TimesAp] + {$+$}; + +\node (Times44) + [xshift=-0.5cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=DoDo4, right of=DoDo4, yshift=-0.5cm, color=blue] + {@}; + +\node (4) + [xshift=-0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=Times44, right of=Times44, yshift=-0.75cm, color=blue] + {4}; + +\node (Times4) + [xshift=0.25cm, inner xsep=0.04cm, inner ysep=0.05cm, below of=Times44, left of=Times44, xshift=2, color=blue] + {@}; + +\node (Times2) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=Times4, left of=Times4, color=blue] + {$+$}; + +\draw[->] (DoDo4) to (TimesAp); +\draw[->] (TimesAp) to (Times); +\draw[->] (TimesAp) to (Times44); +\draw[->] (DoDo4) to (Times44); +\draw[->,color=blue,dashed] (Times44) to (Times4); +\draw[->,color=blue,dashed] (Times4) to (Times2); +\draw[->,color=blue,dashed] (Times4) |- (4); +\draw[->,color=blue,dashed] (Times44) to (4); + 
+\end{scope} diff --git a/diagrams/template/step5.tex b/diagrams/template/step5.tex new file mode 100644 index 0000000..0484775 --- /dev/null +++ b/diagrams/template/step5.tex @@ -0,0 +1,22 @@ +\begin{scope}[node distance=0.75cm] + +\node (DoDo4) [inner xsep=0.01cm, inner ysep=0.03cm] at (0, 0) {@}; + +\node (TimesAp) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=DoDo4, left of=DoDo4] + {@}; + +\node (Times) + [xshift=0.25cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=TimesAp, left of=TimesAp] + {$+$}; + +\node (8) + [xshift=-0.5cm, inner xsep=0.01cm, inner ysep=0.03cm, below of=DoDo4, right of=DoDo4, yshift=-0.75cm] + {8}; + +\draw[->] (DoDo4) to (TimesAp); +\draw[->] (TimesAp) to (Times); +\draw[->] (TimesAp) |- (8); +\draw[->] (DoDo4) to (8); + +\end{scope} diff --git a/diagrams/tt/prodfx.tex b/diagrams/tt/prodfx.tex new file mode 100644 index 0000000..c2febd2 --- /dev/null +++ b/diagrams/tt/prodfx.tex @@ -0,0 +1,12 @@ +\node (gamma) at (0, 2) {$\Gamma$}; + +\node (prod) at (0, 0) {$\beta^\alpha \times \alpha$}; + +\node (arg) at (2.5, 0) {$\alpha$}; +\node (func) at (-2.5, 0) {$\beta^\alpha$}; + +\draw[->] (gamma) -- node[midway] {$\left$} (prod); +\draw[->] (prod) -- node[midway] {$\pi_1$} (func); +\draw[->] (prod) -- node[midway,below] {$\pi_2$} (arg); +\draw[->] (gamma) -- node[midway] {$f$} (func); +\draw[->] (gamma) -- node[midway] {$x$} (arg); \ No newline at end of file diff --git a/hie.yaml b/hie.yaml new file mode 100644 index 0000000..142e69f --- /dev/null +++ b/hie.yaml @@ -0,0 +1,2 @@ +cradle: + stack: \ No newline at end of file diff --git a/pages/contact.md b/pages/contact.md new file mode 100644 index 0000000..8d377ba --- /dev/null +++ b/pages/contact.md @@ -0,0 +1,42 @@ +--- +title: Contact +--- +Here are the easiest ways to reach me: + + + +
+ +
+
+ @plt_amy +
+ + + I am unhealthily active on the bird website, so follow me on Twitter to stay up to date with what I think about.. everything! + +
+ +
+
+ {ames} +
+ + + I'm active in `##dependent` on [libera.chat](https://libera.chat) to talk about types! + +
+ +
+ +If you like what I do, here are some ways you can support this blog: + +
+
+Ko-fi +You can send me a one-time donation on Ko-Fi. Just remember not to read the name on the receipt! +(Paypal sucks) +
+
diff --git a/pages/index.html b/pages/index.html new file mode 100644 index 0000000..1f8edc6 --- /dev/null +++ b/pages/index.html @@ -0,0 +1,83 @@ +--- +title: Home +--- + +
+
+

Hi!

+ +

+ +profile picture + +I'm Amélia, a non-binary (they/them) mathematician & programmer. This +blog is where I write about programming languages: their implementation, +their semantics, etc. +

+ +
+ +
+ + + + + + + + + + + + + + + + + + + + +
+ +
+

+ +cube + +In addition to this blog, I maintain the 1Lab, a formalised, cross-linked +reference resource for Homotopy Type Theory, done in Cubical Agda. +

+ +
+

+ +amulet + +My most significant project other than this blog and the 1lab is Amulet, a functional programming +language in the ML tradition with support for advanced type-level +programming. +

+ +
+ +
+

Posts

+ +

+Here are the lastest 5 posts from the blog: +

+ +$partial("templates/post-list.html")$ + +

…or you can find more in the archives.

+
+
+ + \ No newline at end of file diff --git a/pages/oss.md b/pages/oss.md new file mode 100644 index 0000000..6d3a658 --- /dev/null +++ b/pages/oss.md @@ -0,0 +1,13 @@ +--- +title: Open-source Licenses +--- + + + +This blog redistributes (parts of) the following free software projects: + +* **KaTeX** is a fast JavaScript library for rendering LaTeX on the client. I use it to pre-generate amazing looking mathematics at compile time. **KaTeX is licensed under the terms of the MIT license; A copy is available [here](/static/licenses/LICENSE.KaTeX)**. + +* **Iosevka** is a customizable monospace font designed for programmers. It's used in this website for code blocks, and also for any Agda developments I've shared under the amelia.how domain. **Iosevka is distributed under the terms of the SIL Open Font License; A copy is available [here](/static/licenses/LICENSE.Iosevka).** \ No newline at end of file diff --git a/pages/posts/.2020-09-09-typing-proof.md b/pages/posts/.2020-09-09-typing-proof.md new file mode 100644 index 0000000..0090a66 --- /dev/null +++ b/pages/posts/.2020-09-09-typing-proof.md @@ -0,0 +1,164 @@ +--- +title: "This Sentence is False, or: On Natural Language, Typing and Proof" +date: September 9th, 2020 +--- + +The Liar's paradox is often the first paradox someone dealing with logic, even in an informal setting, encounters. It is _intuitively_ paradoxical: how can a sentence be both true, and false? This contradicts (ahem) the law of non-contradiction, that states that "no proposition is both true and false", or, symbolically, $\neg (A \land \neg A)$. Appealing to symbols like that gives us warm fuzzy feelings, because, _of course, the algebra doesn't lie!_ + +There's a problem with that the appeal to symbols, though. And it's nothing to do with non-contradiction: It's to do with well-formedness. How do you accurately translate the "this sentence is false" sentence into a logical formula? 
We can try by giving it a name, say $L$ (for liar), and state that $L$ must represent some logical formula. Note that the equality symbol $=$ here is _not_ a member of the logic we're using to express $L$, it's a symbol of this discourse. It's _meta_​logical. + +$$ L = \dots $$ + +But what should fill in the dots? $L$ is the sentence we're symbolising, so "this sentence" must mean $L$. Saying "X is false" can be notated in a couple of equivalent ways, such as $\neg X$ or $X \to \bot$. We'll go with the latter: it's a surprise tool that will help us later. Now we know how to fill in the dots: It's $L \to \bot$. + +
+Truth tables demonstrating the equivalence between $\neg A$ and $A \to \bot$, if you are classically inclined. +
+ + + + + + + +
$A$ $\neg A$
$\top$$\bot$
$\bot$$\top$
+ + + + + + + +
$A$ $A\to\bot$
$\top$$\bot$
$\bot$$\top$
+
+
+ +But wait. If $L = L \to \bot$, then $L = (L \to \bot) \to \bot$, and also $L = ((L \to \bot) \to \bot) \to \bot$, and so... forever. There is no finite, well-formed formula of first-order logic that represents the sentence "This sentence is false", thus, assigning a truth value to it is meaningless: Saying "This sentence is false" is true is just as valid as saying that it's false, both of those are as valid as saying "$\neg$ is true". + +Wait some more, though: we're not done. It's known, by the [Curry-Howard isomorphism], that logical systems correspond to type systems. Therefore, if we can find a type-system that assigns a meaning to our sentence $L$, then there _must_ exist a logical system that can express $L$, and so, we can decide its truth! + +Even better, we don't need to analyse the truth of $L$ logically, we can do it type-theoretically: if we can build an inhabitant of $L$, then it is true; If we can build an inhabitant of $\neg L$, then it's false; And otherwise, I'm just not smart enough to do it. + +So what is the smallest type system that lets us assign a meaning to $L$? + +# A system of equirecursive types: $\lambda_{\text{oh no}}$[^1] + +[^1]: The reason for the name will become obvious soon enough. + +We do not need a complex type system to express $L$: a simple extension over the basic simply-typed lambda calculus $\lambda_{\to}$ will suffice. No fancy higher-ranked or dependent types here, sorry! + +As a refresher, the simply-typed lambda calculus has _only_: + +* A set of base types $\mathbb{B}$, +* Function types $\tau \to \sigma$, +* For each base type $b \in \mathbb{B}$, a set of base terms $\mathbb{T}_b$, +* Variables $v$, +* Lambda abstractions $\lambda v. e$, and +* Application $e\ e'$. + +
+Type assignment rules for the basic $\lambda_{\to}$ calculus. +
+
+$$\frac{x : \tau \in \Gamma}{\Gamma \vdash x : \tau}$$ +
+
+$$\frac{b \in \mathbb{B} \quad x \in \mathbb{T}_{b}}{\Gamma \vdash x : b}$$ +
+
+$$\frac{\Gamma, x : \sigma \vdash e : \tau}{\Gamma \vdash \lambda x. e : \sigma \to \tau}$$ +
+
+$$\frac{\Gamma \vdash e : \sigma \to \tau \quad \Gamma \vdash e' : \sigma}{\Gamma \vdash e\ e' : \tau}$$ +</div>
+
+
+ +First of all, we'll need a type to represent the logical proposition $\bot$. This type is empty: It has no type formers. Its elimination rule corresponds to the principle of explosion, and we write it $\mathtt{absurd}$. The inference rule: + +
+$$\frac{\Gamma \vdash e : \bot}{\Gamma \vdash \mathtt{absurd}\ e : A}$$ +</div>
+ +We're almost there. What we need now is a type former that serves as a solution for equations of the form $v = ... v ...$. That's right: we're just _inventing_ a solution to this class of equations---maths! + +These are the _equirecursive_ types, $\mu a. \tau$. The important part here is _equi_: these types are entirely indistinguishable from their unrollings. Formally, we extend the set of type formers with type variables $a$ and $\mu$-types $\mu a. \tau$, where $\mu a$ acts as a binder for $a$. + +Since we invented $\mu$ types as a solution for equations of the form $a = \tau$, we have that $\mu a. \tau = \tau[\mu a.\tau/a]$, where $\tau[\sigma{}/a]$ means "substitute $\sigma{}$ everywhere $a$ occurs in $\tau$". The typing rules express this identity, saying that anywhere a term might have one as a type, the other works too: + +
+
+$$\frac{\Gamma \vdash e : \tau[\mu a.\tau / a]}{\Gamma \vdash e : \mu a. \tau}$$ +
+
+$$\frac{\Gamma \vdash e : \mu a.\tau}{\Gamma \vdash e : \tau[\mu a. \tau / a]}$$ +
+
+ +Adding these rules, along with the one for eliminating $\bot$, to the $\lambda_{\to}$ calculus nets us the system $\lambda_{\text{oh no}}$. With it, one can finally formulate a representation for our $L$-sentence: it's $\mu a. a \to \bot$. + +There exists a closed term of this type, namely $\lambda k. k\ k$, which means: The "this sentence is false"-sentence is true. We can check this fact ourselves, or, more likely, use a type checker that supports equirecursive types. For example, OCaml with the `-rectypes` compiler option does. + +We'll first define the empty type `void` and the type corresponding to $L$: + +
+~~~~{.ocaml} +type void = | ;; +type l = ('a -> void) as 'a ;; +~~~~ +
+ +Now we can define our proof of $L$, called `yesl`, and check that it has the expected type: + +
+~~~~{.ocaml} +let yesl: l = fun k -> k k ;; +~~~~ +
+ +However. This same function is also a proof that... $\neg L$. Check it out: + +
+~~~~{.ocaml} +let notl (x : l) : void = x x ;; +~~~~ +
+ +# I am Bertrand Russell + +Bertrand Russell (anecdotally) once proved, starting from $1 = 0$, that he was the Pope. I am also the Pope, as it turns out, since I have on hand a proof that $L$ and $\neg L$, in violation of non-contradiction; By transitivity, I am Bertrand Russell. $\blacksquare$ + +Alright, maybe I'm not Russell (drat). But I am, however, a trickster. I tricked you! You thought that this post was going to be about a self-referential sentence, but it was actually about typed programming language design (not very shocking, I know). It's a demonstration of how recursive types (in any form) are logically inconsistent, and of how equirecursive types _are wrong_. + +The logical inconsistency, we all deal with, on a daily basis. It comes with Turing completeness, and it annoys me to no end every single time I accidentally do `let x = ... x ...`{.haskell}. I _really_ wish I had a practical, total functional programming language to use for my day-to-day programming, and this non-termination _everywhere_ is a great big blotch on Haskell's claim of purity. + +The kind of recursive types you get in Haskell is _fine_. They're not _great_ if you like the propositions-as-types interpretation, since it's trivial to derive a contradiction from them, but they're good enough for programming that implementing a positivity checker to ensure your definitions are strictly inductive isn't generally worth the effort. + +Unless your language claims to have "zero runtime errors", in which case, if you implement isorecursive types instead of inductive types, you are _wrong_. See: Elm. God damn it. + +
+So much for "no runtime errors"... I guess spinning forever on the client side is acceptable. +
+ +```elm +-- Elm +type Void = Void Void +type Omega = Omega (Omega -> Void) + +yesl : Omega +yesl = Omega (\(Omega x) -> x (Omega x)) + +notl : Omega -> Void +notl (Omega x) = x (Omega x) +``` + +
+
+ +Equirecursive types, however, are a totally different beast. They are _basically_ useless. Sure, you might not have to write a couple of constructors, here and there... at the cost of _dramatically_ increasing the set of incorrect programs that your type system accepts. Suddenly, typos will compile fine, and your program will just explode at runtime (more likely: fail to terminate). Isn't this what type systems are meant to prevent? + +Thankfully, very few languages implement equirecursive types. OCaml is the only one I know of, and it's gated behind a compiler flag. However, that's a footgun that should _not_ be there. + +**EDIT** (April 14th, 2021) It's been pointed out to me that you can get equirecursive types in OCaml even without passing `-rectypes` to the compiler. I am not an OCaml expert, so I encourage you to see [here](https://gist.github.com/drvink/a0094680aaae2569951ea4601752944d) for more details. + +[Curry-Howard isomorphism]: https://en.wikipedia.org/wiki/Curry%E2%80%93Howard_correspondence diff --git a/pages/posts/2016-08-17-parsec.md b/pages/posts/2016-08-17-parsec.md new file mode 100644 index 0000000..07c54f4 --- /dev/null +++ b/pages/posts/2016-08-17-parsec.md @@ -0,0 +1,309 @@ +--- +title: You could have invented Parsec +date: August 17, 2016 01:29 AM +synopsys: 2 +--- + +As most of us should know, [Parsec](https://hackage.haskell.org/package/parsec) +is a relatively fast, lightweight monadic parser combinator library. + +In this post I aim to show that monadic parsing is not only useful, but a simple +concept to grok. + +We shall implement a simple parsing library with instances of common typeclasses +of the domain, such as Monad, Functor and Applicative, and some example +combinators to show how powerful this abstraction really is. + +--- + +Getting the buzzwords out of the way, being _monadic_ just means that Parsers +instances of `Monad`{.haskell}. 
Recall the Monad typeclass, as defined in +`Control.Monad`{.haskell}, + +```haskell +class Applicative m => Monad m where + return :: a -> m a + (>>=) :: m a -> (a -> m b) -> m b + {- Some fields omitted -} +``` + +How can we fit a parser in the above constraints? To answer that, we must first +define what a parser _is_. + +A naïve implementation of the `Parser`{.haskell} type would be a simple type +synonym. + +```haskell +type Parser a = String -> (a, String) +``` + +This just defines that a parser is a function from a string to a result pair +with the parsed value and the resulting stream. This would mean that parsers are +just state transformers, and if we define it as a synonym for the existing mtl +`State`{.haskell} monad, we get the Monad, Functor and Applicative instances for +free! But alas, this will not do. + +Apart from modeling the state transformation that a parser expresses, we need a +way to represent failure. You already know that `Maybe a`{.haskell} expresses +failure, so we could try something like this: + +```haskell +type Parser a = String -> Maybe (a, String) +``` + +But, as you might have guessed, this is not the optimal representation either: +`Maybe`{.haskell} _does_ model failure, but in a way that is lacking. It can +only express that a computation was successful or that it failed, not why it +failed. We need a way to fail with an error message. That is, the +`Either`{.haskell} monad. + +```haskell +type Parser e a = String -> Either e (a, String) +``` + +Notice how we have the `Maybe`{.haskell} and `Either`{.haskell} outside the +tuple, so that when an error happens we stop parsing immediately. We could +instead have them inside the tuple for better error reporting, but that's out of +scope for a simple blag post. 
+ +This is pretty close to the optimal representation, but there are still some +warts things to address: `String`{.haskell} is a bad representation for textual +data, so ideally you'd have your own `Stream`{.haskell} class that has instances +for things such as `Text`{.haskell}, `ByteString`{.haskell} and +`String`{.haskell}. + +One issue, however, is more glaring: You _can't_ define typeclass instances for +type synonyms! The fix, however, is simple: make `Parser`{.haskell} a newtype. + +```haskell +newtype Parser a + = Parser { parse :: String -> Either String (a, String) } +``` + +--- + +Now that that's out of the way, we can actually get around to instancing some +typeclasses. + +Since the AMP landed in GHC 7.10 (base 4.8), the hierarchy of the Monad +typeclass is as follows: + +```haskell +class Functor (m :: * -> *) where +class Functor m => Applicative m where +class Applicative m => Monad m where +``` + +That is, we need to implement Functor and Applicative before we can actually +implement Monad. + +We shall also add an `Alternative`{.haskell} instance for expressing choice. + +First we need some utility functions, such as `runParser`{.haskell}, that runs a +parser from a given stream. + +```haskell +runParser :: Parser a -> String -> Either String a +runParser (Parser p) s = fst <$> p s +``` + +We could also use function for modifying error messages. For convenience, we +make this an infix operator, ``{.haskell}. + +```haskell +() :: Parser a -> String -> Parser a +(Parser p) err = Parser go where + go s = case p s of + Left _ -> Left err + Right x -> return x +infixl 2 +``` + + +`Functor` +======= + +Remember that Functor models something that can be mapped over (technically, +`fmap`-ed over). + +We need to define semantics for `fmap` on Parsers. A sane implementation would +only map over the result, and keeping errors the same. This is a homomorphism, +and follows the Functor laws. 
+ +However, since we can't modify a function in place, we need to return a new +parser that applies the given function _after_ the parsing is done. + +```haskell +instance Functor Parser where + fn `fmap` (Parser p) = Parser go where + go st = case p st of + Left e -> Left e + Right (res, str') -> Right (fn res, str') +``` + +### `Applicative` + +While Functor is something that can be mapped over, Applicative defines +semantics for applying a function inside a context to something inside a +context. + +The Applicative class is defined as + +```haskell +class Functor m => Applicative m where + pure :: a -> m a + (<*>) :: f (a -> b) -> f a -> f b +``` + +Notice how the `pure`{.haskell} and the `return`{.haskell} methods are +equivalent, so we only have to implement one of them. + +Let's go over this by parts. + +```haskell +instance Applicative Parser where + pure x = Parser $ \str -> Right (x, str) +``` + +The `pure`{.haskell} function leaves the stream untouched, and sets the result +to the given value. + +The `(<*>)`{.haskell} function needs to to evaluate and parse the left-hand side +to get the in-context function to apply it. + +```haskell + (Parser p) <*> (Parser p') = Parser go where + go st = case p st of + Left e -> Left e + Right (fn, st') -> case p' st' of + Left e' -> Left e' + Right (v, st'') -> Right (fn v, st'') +``` + +### `Alternative` + +Since the only superclass of Alternative is Applicative, we can instance it +without a Monad instance defined. We do, however, need an import of +`Control.Applicative`{.haskell}. + +```haskell +instance Alternative Parser where + empty = Parser $ \_ -> Left "empty parser" + (Parser p) <|> (Parser p') = Parser go where + go st = case p st of + Left _ -> p' st + Right x -> Right x +``` + +### `Monad` + +After almost a thousand words, one would be excused for forgetting we're +implementing a _monadic_ parser combinator library. That means, we need an +instance of the `Monad`{.haskell} typeclass. 
+ +Since we have an instance of Applicative, we don't need an implementation of +return: it is equivalent to `pure`, save for the class constraint. + +```haskell +instance Monad Parser where + return = pure +``` + + +The `(>>=)`{.haskell} implementation, however, needs a bit more thought. Its +type signature is + +```haskell +(>>=) :: m a -> (a -> m b) -> m b +``` + +That means we need to extract a value from the Parser monad and apply it to the +given function, producing a new Parser. + +```haskell + (Parser p) >>= f = Parser go where + go s = case p s of + Left e -> Left e + Right (x, s') -> parse (f x) s' +``` + +While some people think that the `fail`{.haskell} is not supposed to be in the +Monad typeclass, we do need an implementation for when pattern matching fails. +It is also convenient to use `fail`{.haskell} for the parsing action that +returns an error with a given message. + +```haskell + fail m = Parser $ \_ -> Left m +``` + +--- + +We now have a `Parser`{.haskell} monad, that expresses a parsing action. But, a +parser library is no good when actual parsing is made harder than easier. To +make parsing easier, we define _combinators_, functions that modify a parser in +one way or another. + +But first, we should get some parsing functions. + +### any, satisfying + +`any` is the parsing action that pops a character off the stream and returns +that. It does no further parsing at all. + +```haskell +any :: Parser Char +any = Parser go where + go [] = Left "any: end of file" + go (x:xs) = Right (x,xs) +``` + +`satisfying` tests the parsed value against a function of type `Char -> +Bool`{.haskell} before deciding if it's successful or a failure. + +```haskell +satisfy :: (Char -> Bool) -> Parser Char +satisfy f = d + x <- any + if f x + then return x + else fail "satisfy: does not satisfy" +``` + +We use the `fail`{.haskell} function defined above to represent failure. 
+ +### `oneOf`, `char` + +These functions are defined in terms of `satisfying`, and parse individual +characters. + +```haskell +char :: Char -> Parser Char +char c = satisfy (c ==) "char: expected literal " ++ [c] + +oneOf :: String -> Parser Char +oneOf s = satisfy (`elem` s) "oneOf: expected one of '" ++ s ++ "'" +``` + +### `string` + +This parser parses a sequence of characters, in order. + +```haskell +string :: String -> Parser String +string [] = return [] +string (x:xs) = do + char x + string xs + return $ x:xs +``` + +--- + +And that's it! In a few hundred lines, we have built a working parser combinator +library with Functor, Applicative, Alternative, and Monad instances. While it's +not as complex or featureful as Parsec in any way, it is powerful enough to +define grammars for simple languages. + +[A transcription](/static/Parser.hs) ([with syntax +highlighting](/static/Parser.hs.html)) of this file is available as runnable +Haskell. The transcription also features some extra combinators for use. diff --git a/pages/posts/2016-08-23-hasochism.lhs b/pages/posts/2016-08-23-hasochism.lhs new file mode 100644 index 0000000..ece56ba --- /dev/null +++ b/pages/posts/2016-08-23-hasochism.lhs @@ -0,0 +1,332 @@ +--- +title: Dependent types in Haskell - Sort of +date: August 23, 2016 +synopsys: 2 +--- + +**Warning**: An intermediate level of type-fu is necessary for understanding +*this post. + +The glorious Glasgow Haskell Compilation system, since around version 6.10 has +had support for indexed type familes, which let us represent functional +relationships between types. Since around version 7, it has also supported +datatype-kind promotion, which lifts arbitrary data declarations to types. Since +version 8, it has supported an extension called `TypeInType`, which unifies the +kind and type level. + +With this in mind, we can implement the classical dependently-typed example: +Length-indexed lists, also called `Vectors`{.haskell}. 
+ +---- + +> {-# LANGUAGE TypeInType #-} + +`TypeInType` also implies `DataKinds`, which enables datatype promotion, and +`PolyKinds`, which enables kind polymorphism. + +`TypeOperators` is needed for expressing type-level relationships infixly, and +`TypeFamilies` actually lets us define these type-level functions. + +> {-# LANGUAGE TypeOperators #-} +> {-# LANGUAGE TypeFamilies #-} + +Since these are not simple-kinded types, we'll need a way to set their kind +signatures[^kind] explicitly. We'll also need Generalized Algebraic Data Types +(or GADTs, for short) for defining these types. + +> {-# LANGUAGE KindSignatures #-} +> {-# LANGUAGE GADTs #-} + +Since GADTs which couldn't normally be defined with regular ADT syntax can't +have deriving clauses, we also need `StandaloneDeriving`. + +> {-# LANGUAGE StandaloneDeriving #-} + +> module Vector where +> import Data.Kind + +---- + +Natural numbers +=============== + +We could use the natural numbers (and singletons) implemented in `GHC.TypeLits`, +but since those are not defined inductively, they're painful to use for our +purposes. + +Recall the definition of natural numbers proposed by Giuseppe Peano in his +axioms: **Z**ero is a natural number, and the **s**uccessor of a natural number +is also a natural number. + +If you noticed the bold characters at the start of the words _zero_ and +_successor_, you might have already assumed the definition of naturals to be +given by the following GADT: + +< data Nat where +< Z :: Nat +< S :: Nat -> Nat + +This is fine if all you need are natural numbers at the _value_ level, but since +we'll be parametrising the Vector type with these, they have to exist at the +type level. The beauty of datatype promotion is that any promoted type will +exist at both levels: A kind with constructors as its inhabitant types, and a +type with constructors as its... constructors. 
+ +Since we have TypeInType, this declaration was automatically lifted, but we'll +use explicit kind signatures for clarity. + +> data Nat :: Type where +> Z :: Nat +> S :: Nat -> Nat + +The `Type` kind, imported from `Data.Kind`, is a synonym for the `*` (which will +eventually replace the latter). + +Vectors +======= + +Vectors, in dependently-typed languages, are lists that apart from their content +encode their size along with their type. + +If we assume that lists can not have negative length, and an empty vector has +length 0, this gives us a nice inductive definition using the natural number +~~type~~ kind[^kinds] + + > 1. An empty vector of `a` has size `Z`{.haskell}. + > 2. Adding an element to the front of a vector of `a` and length `n` makes it + > have length `S n`{.haskell}. + +We'll represent this in Haskell as a datatype with a kind signature of `Nat -> +Type -> Type` - That is, it takes a natural number (remember, these were +automatically lifted to kinds), a regular type, and produces a regular type. +Note that, `->` still means a function at the kind level. + +> data Vector :: Nat -> Type -> Type where + +Or, without use of `Type`, + +< data Vector :: Nat -> * -> * where + +We'll call the empty vector `Nil`{.haskell}. Remember, it has size +`Z`{.haskell}. + +> Nil :: Vector Z a + +Also note that type variables are implicit in the presence of kind signatures: +They are assigned names in order of appearance. + +Consing onto a vector, represented by the infix constructor `:|`, sets its +length to the successor of the existing length, and keeps the type of elements +intact. + +> (:|) :: a -> Vector x a -> Vector (S x) a + +Since this constructor is infix, we also need a fixidity declaration. For +consistency with `(:)`, cons for regular lists, we'll make it right-associative +with a precedence of `5`. + +> infixr 5 :| + +We'll use derived `Show`{.haskell} and `Eq`{.haskell} instances for +`Vector`{.haskell}, for clarity reasons. 
While the derived `Eq`{.haskell} is +fine, one would prefer a nicer `Show`{.haskell} instance for a +production-quality library. + +> deriving instance Show a => Show (Vector n a) +> deriving instance Eq a => Eq (Vector n a) + +Slicing up Vectors {#slicing} +================== + +Now that we have a vector type, we'll start out by implementing the 4 basic +operations for slicing up lists: `head`, `tail`, `init` and `last`. + +Since we're working with complicated types here, it's best to always use type +signatures. + +Head and Tail {#head-and-tail} +------------- + +Head is easy - It takes a vector with length `>1`, and returns its first +element. This could be represented in two ways. + +< head :: (S Z >= x) ~ True => Vector x a -> a + +This type signature means that, if the type-expression `S Z >= x`{.haskell} +unifies with the type `True` (remember - datakind promotion at work), then head +takes a `Vector x a` and returns an `a`. + +There is, however, a much simpler way of doing the above. + +> head :: Vector (S x) a -> a + +That is, head takes a vector whose length is the successor of a natural number +`x` and returns its first element. + +The implementation is just as concise as the one for lists: + +> head (x :| _) = x + +That's it. That'll type-check and compile. + +Trying, however, to use that function on an empty vector will result in a big +scary type error: + +```plain +Vector> Vector.head Nil + +:1:13: error: + • Couldn't match type ‘'Z’ with ‘'S x0’ + Expected type: Vector ('S x0) a + Actual type: Vector 'Z a + • In the first argument of ‘Vector.head’, namely ‘Nil’ + In the expression: Vector.head Nil + In an equation for ‘it’: it = Vector.head Nil +``` + +Simplified, it means that while it was expecting the successor of a natural +number, it got zero instead. This function is total, unlike the one in +`Data.List`{.haskell}, which fails on the empty list. 
+ +< head [] = error "Prelude.head: empty list" +< head (x:_) = x + +Tail is just as easy, except in this case, instead of discarding the predecessor +of the vector's length, we'll use it as the length of the resulting vector. + +This makes sense, as, logically, getting the tail of a vector removes its first +length, thus "unwrapping" a level of `S`. + +> tail :: Vector (S x) a -> Vector x a +> tail (_ :| xs) = xs + +Notice how neither of these have a base case for empty vectors. In fact, adding +one will not typecheck (with the same type of error - Can't unify `Z`{.haskell} +with `S x`{.haskell}, no matter how hard you try.) + +Init {#init} +---- + +What does it mean to take the initial of an empty vector? That's obviously +undefined, much like taking the tail of an empty vector. That is, `init` and +`tail` have the same type signature. + +> init :: Vector (S x) a -> Vector x a + +The `init` of a singleton list is nil. This type-checks, as the list would have +had length `S Z` (that is - 1), and now has length `Z`. + +> init (x :| Nil) = Nil + +To take the init of a vector with more than one element, all we do is recur on +the tail of the list. + +> init (x :| y :| ys) = x :| Vector.init (y :| ys) + +That pattern is a bit weird - it's logically equivalent to `(x :| +xs)`{.haskell}. But, for some reason, that doesn't make the typechecker happy, +so we use the long form. + +Last {#last} +---- + +Last can, much like the list version, be implemented in terms of a left fold. +The type signature is like the one for head, and the fold is the same as that +for lists. The foldable instance for vectors is given [here](#Foldable). + +> last :: Vector (S x) a -> a +> last = foldl (\_ x -> x) impossible where + +Wait - what's `impossible`? 
Since this is a fold, we do still need an initial +element - We could use a pointful fold with the head as the starting point, but +I feel like this helps us to understand the power of dependently-typed vectors: +That error will _never_ happen. Ever. That's why it's `impossible`! + +> impossible = error "Type checker, you have failed me!" + +That's it for the basic vector operations. We can now slice a vector anywhere +that makes sense - Though, there's one thing missing: `uncons`. + +Uncons {#uncons} +------ + +Uncons splits a list (here, a vector) into a pair of first element and rest. +With lists, this is generally implemented as returning a `Maybe`{.haskell} type, +but since we can encode the type of a vector in it's type, there's no need for +that here. + +> uncons :: Vector (S x) a -> (a, Vector x a) +> uncons (x :| xs) = (x, xs) + +Mapping over Vectors {#functor} +==================== + +We'd like a `map` function that, much like the list equivalent, applies a +function to all elements of a vector, and returns a vector with the same length. +This operation should hopefully be homomorphic: That is, it keeps the structure +of the list intact. + +The `base` package has a typeclass for this kind of morphism, can you guess what +it is? If you guessed Functor, then you're right! If you didn't, you might +aswell close the article now - Heavy type-fu inbound, though not right now. + +The functor instance is as simple as can be: + +> instance Functor (Vector x) where + +The fact that functor expects something of kind `* -> *`, we need to give the +length in the instance head - And since we do that, the type checker guarantees +that this is, in fact, a homomorphic relationship. + +Mapping over `Nil` just returns `Nil`. + +> f `fmap` Nil = Nil + +Mapping over a list is equivalent to applying the function to the first element, +then recurring over the tail of the vector. 
+ +> f `fmap` (x :| xs) = f x :| (fmap f xs) + +We didn't really need an instance of Functor, but I think standalone map is +silly. + +Folding Vectors {#foldable} +=============== + +The Foldable class head has the same kind signature as the Functor class head: +`(* -> *) -> Constraint` (where `Constraint` is the kind of type classes), that +is, it's defined by the class head + +< class Foldable (t :: Type -> Type) where + +So, again, the length is given in the instance head. + +> instance Foldable (Vector x) where +> foldr f z Nil = z +> foldr f z (x :| xs) = f x $ foldr f z xs + +This is _exactly_ the Foldable instance for `[a]`, except the constructors are +different. Hopefully, by now you've noticed that Vectors have the same +expressive power as lists, but with more safety enforced by the type checker. + +Conclusion +========== + +Two thousand words in, we have an implementation of functorial, foldable vectors +with implementations of `head`, `tail`, `init`, `last` and `uncons`. Since +going further (implementing `++`, since a Monoid instance is impossible) would +require implementing closed type familes, we'll leave that for next time. + +Next time, we'll tackle the implementation of `drop`, `take`, `index` (`!!`, but +for vectors), `append`, `length`, and many other useful list functions. +Eventually, you'd want an implementation of all functions in `Data.List`. We +shall tackle `filter` in a later issue. + +[^kind]: You can read about [Kind polymorphism and +Type-in-Type](https://downloads.haskell.org/~ghc/latest/docs/html/users_guide/glasgow_exts.html#kind-polymorphism-and-type-in-type) +in the GHC manual. + +[^kinds]: The TypeInType extension unifies the type and kind level, but this +article still uses the word `kind` throughout. This is because it's easier to +reason about types, datatype promotion and type familes if you have separate +type and kind levels. 
diff --git a/pages/posts/2016-08-26-parsec2.lhs b/pages/posts/2016-08-26-parsec2.lhs new file mode 100644 index 0000000..f575112 --- /dev/null +++ b/pages/posts/2016-08-26-parsec2.lhs @@ -0,0 +1,173 @@ +--- +title: Monadic Parsing with User State +date: August 26, 2016 +synopsys: 2 +--- + +> {-# LANGUAGE FlexibleInstances, MultiParamTypeClasses #-} +> module StatefulParsing where +> import Control.Monad.State.Class +> import Control.Applicative + +In this post I propose an extension to the monadic parser framework +introduced in a previous post, _[You could have invented +Parsec](/posts/2016-08-17.html)_, that extends +the parser to also support embedded user state in your parsing. + +This could be used, for example, for parsing a language with +user-extensible operators: The precedences and fixidities of operators +would be kept in a hashmap threaded along the bind chain. + +Instead of posing these changes as diffs, we will rewrite the parser +framework from scratch with the updated type. + +--- + +Parser `newtype`{.haskell} +========================= + +Our new parser is polymorphic in both the return type and the user state +that, so we have to update the `newtype`{.haskell} declaration to match. + +> newtype Parser state result +> = Parser { runParser :: String +> -> state +> -> Either String (result, state, String) } + +Our tuple now contains the result of the parsing operation and the new +user state, along with the stream. We still need to supply a stream to +parse, and now also supply the initial state. This will be reflected in +our functions. + +For convenience, we also make a `Parser' a`{.haskell} type alias for +parsers with no user state. + +< type Parser' a = Parser () a + +Seeing as type constructors are also curried, we can apply η-reduction +to get the following, which is what we'll go +with. 
+ +> type Parser' = Parser () + +`Functor`{.haskell} instance +============================ + +> instance Functor (Parser st) where + +The functor instance remains mostly the same, except now we have to +thread the user state around, too. + +The instance head also changes to fit the kind signature of the +`Functor`{.haskell} typeclass. Since user state can not change from +fmapping, this is fine. + +> fn `fmap` (Parser p) = Parser go where +> go st us = case p st us of +> Left e -> Left e +> Right (r, us', st') -> Right (fn r, us', st') + +As you can see, the new user state (`us'`) is just returned as is. + +`Applicative`{.haskell} instance +================================ + +> instance Applicative (Parser st) where + +The new implementations of `pure`{.haskell} and `<*>`{.haskell} need to +correctly manipulate the user state. In the case of `pure`, it's just passed +as-is to the `Right`{.haskell} constructor. + +> pure ret = Parser go where +> go st us = Right (ret, us, st) + +Since `(<*>)` needs to evaluate both sides before applying the function, we need +to pass the right-hand side's generated user state to the right-hand side for +evaluation. + +> (Parser f) <*> (Parser v) = Parser go where +> go st us = case f st us of +> Left e -> Left e +> Right (fn, us', st') -> case v st' us' of +> Left e -> Left e +> Right (vl, us'', st'') -> Right (fn vl, us'', st'') + + +`Monad`{.haskell} instance +========================== + +> instance Monad (Parser st) where + +Since we already have an implementation of `pure`{.haskell} from the Applicative +instance, we don't need to worry about an implementation of `return`. + +> return = pure + +The monad instance is much like the existing monad instance, except now we have +to give the updated parser state to the new computation. 
+ +> (Parser p) >>= f = Parser go where +> go s u = case p s u of +> Left e -> Left e +> Right (x, u', s') -> runParser (f x) s' u' + +`MonadState`{.haskell} instance +=============================== + +> instance MonadState st (Parser st) where + +Since we now have a state transformer in the parser, we can make it an instance +of the MTL's `MonadState` class. + +The implementation of `put`{.haskell} must return `()` (the unit value), and +needs to replace the existing state with the supplied one. This operation can +not fail. + +Since this is a parsing framework, we also need to define how the stream is +going to be affected: In this case, it isn't. + +> put us' = Parser go where +> go st _ = Right ((), us', st) + +The `get`{.haskell} function returns the current user state, and leaves it +untouched. This operation also does not fail. + +> get = Parser go where +> go st us = Right (us, us, st) + +Since we're an instance of `MonadState`{.haskell}, we needn't an implementation +of `modify` and friends - They're given by the MTL. + +`Alternative`{.haskell} instance +================================ + +> instance Alternative (Parser st) where + +The `Alternative`{.haskell} instance uses the same state as it was given for +trying the next parse. + +The `empty`{.haskell} parser just fails unconditionally. + +> empty = Parser go where +> go _ _ = Left "empty parser" + +`(<|>)` will try both parsers in order, reusing both the state and the stream. + +> (Parser p) <|> (Parser q) = Parser go where +> go st us = case p st us of +> Left e -> q st us +> Right v -> Right v + +Conclusion +========== + +This was a relatively short post. This is because many of the convenience +functions defined in the previous post also work with this parser framework, if +you replace `Parser` with `Parser'`. You can now use `get`, `put` and `modify` +to work on the parser's user state. As a closing note, a convenience function +for running parsers with no state is given. 
> parse :: Parser' a -> String -> Either String a
> parse p str = case runParser p str () of
>   Left e -> Left e
>   Right (x, _, _) -> Right x
+ +While this may sound rather limiting, there are very few constructs that +can simultaneously be implemented with +`call-with-current-continuation`{.scheme} without also being expressible +in terms of delimited continuations. The converse, however, is untrue. +While `call/cc`{.scheme} be used to implement any control abstraction, +it can't implement any _two_ control abstractions: the continuations it +reifies are uncomposable[^3]. + +### Delimited Continuations in Urn + +Our implementation of delimited continuations follows the Guile Scheme +tradition of two functions `call-with-prompt` and `abort-to-prompt`, +which are semantically equivalent to the more traditional +`shift`/`reset`. This is, however, merely an implementation detail, as +both schemes are available. + +We have decided to base our implementation on Lua's existing coroutine +machinery instead of implementing an ad-hoc solution especially for Urn. +This lets us reuse and integrate with existing Lua code, which is one of +the goals for the language. + +`call-with-prompt` is used to introduce a _prompt_ into scope, which +delimits a frame execution and sets up an abort handler with the +specified tag. Later on, calls to `abort-to-prompt` reify the rest of +the program slice's state and jump into the handler set up. + +```lisp +(call/p 'a-prompt-tag + (lambda () + ; code to run with the prompt + ) + (lambda (k) + ; abort handler + )) +``` + +One limitation of the current implementation is that the continuation, +when invoked, will no longer have the prompt in scope. A simple way to +get around this is to store the prompt tag and handler in values and use +`call/p`[^4] again instead of directly calling the continuation. + +Unfortunately, being implemented on top of Lua coroutines does bring one +significant disadvantage: The reified continuations are single-use. 
+After a continuation has reached the end of its control frame, there's +no way to make it go back, and there's no way to copy continuations +either (while we have a wrapper around coroutines, the coroutines +themselves are opaque objects, and there's no equivalent of +`string.dump`{.lua} to, for instance, decompile and recompile them) + +### Why? + +In my opinion (which, like it or not, is the opinion of the Urn team), +Guile-style delimited continuations provide a much better abstraction +than operating with Lua coroutines directly, which may be error prone +and feels out of place in a functional-first programming language. + +As a final motivating example, below is an in-depth explanation of +a tiny cooperative task scheduler. + +```lisp +(defun run-tasks (&tasks) ; 1 + (loop [(queue tasks)] ; 2 + [(empty? queue)] ; 2 + (call/p 'task (car queue) + (lambda (k) + (when (alive? k) + (push-cdr! queue k)))) ; 3 + (recur (cdr queue)))) ; 4 +``` + +1. We begin, of course, by defining our function. As inputs, we take + a list of tasks to run, which are generally functions, but may be Lua + coroutines (`threads`) or existing continuations, too. As a sidenote, + in Urn, variadic arguments have `&` prepended to them, instead of + having symbols beginning of `&` acting as modifiers in a lambda-list. + For clarity, that is wholly equivalent to `(defun run-tasks (&rest + tasks)`{.lisp}. + +2. Then, we take the first element of the queue as the current task to + run, and set up a prompt of execution. The task will run until it + hits an `abort-to-prompt`, at which point it will be interrupted and + the handler will be invoked. + +3. The handler inspects the reified continuation to see if it is + suitable for being scheduled again, and if so, pushes it to the end + of the queue. This means it'll be the first task to execute again + when the scheduler is done with the current set of working tasks. + +4. 
We loop back to the start with the first element (the task we just + executed) removed. + +Believe it or not, the above is a fully functioning cooperative +scheduler that can execute any number of tasks.[^5] + +### Conclusion + +I think that the addition of delimited continuations to Urn brings +a much needer change in the direction of the project: Moving away from +ad-hoc abstraction to structured, proven abstraction. Hopefully this is +the first of many to come. + + +[^1]: Though this might come off as a weird decision to some, there is +a logical reason behind it: Urn was initially meant to be used in the +[ComputerCraft](https://computercraft.info) mod for Minecraft, which +uses the Lua programming language, though the language has outgrown it +by now. For example, the experimental `readline` support is being +implemented with the LuaJIT foreign function interface. + +[^2]: [The Theory and Practice of First-Class +Prompts](http://www.cs.tufts.edu/~nr/cs257/archive/matthias-felleisen/prompts.pdf). + +[^3]: Oleg Kiselyov demonstrates +[here](http://okmij.org/ftp/continuations/against-callcc.html#traps) +that abstractions built on `call/cc`{.scheme} do not compose. + +[^4]: `call-with-prompt` is a bit of a mouthful, so the alias `call/p` +is blessed. + +[^5]: There's a working example [here](/static/tasks.lisp) ([with syntax +highlighting](/static/tasks.lisp.html)) as runnable Urn. Clone the +compiler then execute `lua bin/urn.lua --run tasks.lisp`. + + diff --git a/pages/posts/2017-08-02-urnmatch.md b/pages/posts/2017-08-02-urnmatch.md new file mode 100644 index 0000000..7588eb0 --- /dev/null +++ b/pages/posts/2017-08-02-urnmatch.md @@ -0,0 +1,231 @@ +--- +title: The Urn Pattern Matching Library +date: August 2, 2017 +--- + +Efficient compilation of pattern matching is not exactly an open problem +in computer science in the same way that implementing say, type systems, +might be, but it's still definitely possible to see a lot of mysticism +surrounding it. 
+ +In this post I hope to clear up some misconceptions regarding the +implementation of pattern matching by demonstrating one such +implementation. Do note that our pattern matching engine is strictly +_linear_, in that pattern variables may only appear once in the match +head. This is unlike other languages, such as Prolog, in which variables +appearing more than once in the pattern are unified together. + +### Structure of a Pattern Match + +Pattern matching always involves a pattern (the _match head_, as we call +it) and a value to be compared against that pattern, the _matchee_. +Sometimes, however, a pattern match will also include a body, to be +evaluated in case the pattern does match. + +```lisp +(case 'some-value ; matchee + [some-pattern ; match head + (print! "some body")]) ; match body +``` + +As a side note, keep in mind that `case`{.lisp} has linear lookup of +match bodies. Though logarithmic or constant-time lookup might be +possible, it is left as an exercise for the reader. + +### Compiling Patterns + +To simplify the task of compiling patterns to an intermade form without +them we divide their compilation into two big steps: compiling the +pattern's test and compiling the pattern's bindings. We do so +_inductively_ - there are a few elementary pattern forms on which the +more complicated ones are built upon. + +Most of these elementary forms are very simple, but two are the +simplest: _atomic forms_ and _pattern variables_. An atomic form is the +pattern correspondent of a self-evaluating form in Lisp: a string, an +integer, a symbol. We compare these for pointer equality. Pattern +variables represent unknowns in the structure of the data, and a way to +capture these unknowns. 
+ ++------------------+----------+-------------+ +| Pattern | Test | Bindings | ++:=================+:=========+:============+ +| Atomic form | Equality | Nothing | ++------------------+----------+-------------+ +| Pattern variable | Nothing | The matchee | ++------------------+----------+-------------+ + +All compilation forms take as input the pattern to compile along with +a symbol representing the matchee. Patterns which involve other patterns +(for instance, lists, conses) will call the appropriate compilation +forms with the symbol modified to refer to the appropriate component of +the matchee. + +Let's quickly have a look at compiling these elementary patterns before +looking at the more interesting ones. + +```lisp +(defun atomic-pattern-test (pat sym) + `(= ,pat ,sym)) +(defun atomic-pattern-bindings (pat sym) + '()) +``` + +Atomic forms are the simplest to compile - we merely test that the +symbol's value is equal (with `=`, which compares identities, instead of +with `eq?` which checks for equivalence - more complicated checks, such +as handling list equality, need not be handled by the equality function +as we handle them in the pattern matching library itself) and emit no +bindings. + +```lisp +(defun variable-pattern-test (pat sym) + `true) +(defun variable-pattern-bindings (pat sym) + (list `(,pat ,sym))) +``` + +The converse is true for pattern variables, which have no test and bind +themselves. The returned bindings are in association list format, and +the top-level macro that users invoke will collect these and them bind +them with `let*`{.lisp}. + +Composite forms are a bit more interesting: These include list patterns +and cons patterns, for instance, and we'll look at implementing both. +Let's start with list patterns. + +To determine if a list matches a pattern we need to test for several +things: + +1. First, we need to test if it actually is a list at all! +2. 
The length of the list is also tested, to see if it matches the length + of the elements stated in the pattern +3. We check every element of the list against the corresponding elements + of the pattern + +With the requirements down, here's the implementation. + +```lisp +(defun list-pattern-test (pat sym) + `(and (list? ,sym) ; 1 + (= (n ,sym) ,(n pat)) ; 2 + ,@(map (lambda (index) ; 3 + (pattern-test (nth pat index) `(nth ,sym ,index))) + (range :from 1 :to (n pat))))) +``` + +To test for the third requirement, we call a generic dispatch function +(which is trivial, and thus has been inlined) to compile the $n$th pattern +in the list against the $n$th element of the actual list. + +List pattern bindings are similarly easy: + +```lisp +(defun list-pattern-bindings (pat sym) + (flat-map (lambda (index) + (pattern-bindings (nth pat index) `(nth ,sym ,index))) + (range :from 1 :to (n pat)))) +``` + +Compiling cons patterns is similarly easy if your Lisp is proper: We +only need to check for `cons`{.lisp}-ness (or `list`{.lisp}-ness, less +generally), then match the given patterns against the car and the cdr. + +```lisp +(defun cons-pattern-test (pat sym) + `(and (list? ,sym) + ,(pattern-test (cadr pat) `(car ,sym)) + ,(pattern-test (caddr pat) `(cdr ,sym)))) + +(defun cons-pattern-bindings (pat sym) + (append (pattern-bindings (cadr pat) `(car ,sym)) + (pattern-bindings (caddr pat) `(cdr ,sym)))) +``` + +Note that, in Urn, `cons` patterns have the more general form `(pats* +. pat)` (using the asterisk with the usual meaning of asterisk), and can +match any number of elements in the head. It is also less efficient than +expected, due to the nature of `cdr` copying the list's tail. (Our lists +are not linked - rather, they are implemented over Lua arrays, and as +such, removing the first element is rather inefficient.) + +### Using patterns + +Now that we can compile a wide assortment of patterns, we need a way to +actually use them to scrutinize data. 
For this, we implement two forms: +an improved version of `destructuring-bind`{.lisp} and `case`{.lisp}. + +Implementing `destructuring-bind`{.lisp} is simple: We only have +a single pattern to test against, and thus no search is nescessary. We +simply generate the pattern test and the appropriate bindings, and +generate an error if the pattern does not mind. Generating a friendly +error message is similarly left as an exercise for the reader. + +Note that as a well-behaving macro, destructuring bind will not evaluate +the given variable more than once. It does this by binding it to +a temporary name and scrutinizing that name instead. + +```lisp +(defmacro destructuring-bind (pat var &body) + (let* [(variable (gensym 'var)) + (test (pattern-test pat variable)) + (bindings (pattern-bindings pat variable))] + `(with (,variable ,var) + (if ,test + (progn ,@body) + (error! "pattern matching failure"))))) +``` + +Implementing case is a bit more difficult in a language without +`cond`{.lisp}, since the linear structure of a pattern-matching case +statement would have to be transformed into a tree of `if`-`else` +combinations. Fortunately, this is not our case (pun intended, +definitely.) + +```lisp +(defmacro case (var &cases) + (let* [(variable (gensym 'variable))] + `(with (,variable ,var) + (cond ,@(map (lambda (c) + `(,(pattern-test (car c) variable) + (let* ,(pattern-bindings (car c) variable) + ,@(cdr c)))) + cases))))) +``` + + +Again, we prevent reevaluation of the matchee by binding it to +a temporary symbol. This is especially important in an impure, +expression-oriented language as evaluating the matchee might have side +effects! Consider the following contrived example: + +```lisp +(case (progn (print! "foo") + 123) + [1 (print! "it is one")] + [2 (print! "it is two")] + [_ (print! "it is neither")]) ; _ represents a wild card pattern. +``` + +If the matchee wasn't bound to a temporary value, `"foo"` would be +printed thrice in this example. 
Both the toy implementation presented +here and the implementation in the Urn standard library will only +evaluate matchees once, thus preventing effect duplication. + +### Conclusion + +Unlike previous blog posts, this one isn't runnable Urn. If you're +interested, I recommend checking out [the actual +implementation](https://gitlab.com/urn/urn/blob/master/lib/match.lisp). +It gets a bit hairy at times, particularly with handling of structure +patterns (which match Lua tables), but it's similar enough to the above +that this post should serve as a vague map of how to read it. + +In a bit of a meta-statement I want to point out that this is the first +(second, technically!) of a series of posts detailing the interesting +internals of the Urn standard library: It fixes two things in the sorely +lacking category: content in this blag, and standard library +documentation. + +Hopefully this series is as nice to read as it is for me to write, and +here's hoping I don't forget about this blag for a year again. diff --git a/pages/posts/2017-08-06-constraintprop.md b/pages/posts/2017-08-06-constraintprop.md new file mode 100644 index 0000000..37029d3 --- /dev/null +++ b/pages/posts/2017-08-06-constraintprop.md @@ -0,0 +1,276 @@ +--- +title: Optimisation through Constraint Propagation +date: August 06, 2017 +--- + +Constraint propagation is a new optimisation proposed for implementation +in the Urn compiler[^mr]. It is a variation on the idea of +flow-sensitive typing in that it is not applied to increasing program +safety, rather being used to improve _speed_. + +### Motivation + +The Urn compiler is decently fast for being implemented in Lua. +Currently, it manages to compile itself (and a decent chunk of the +standard library) in about 4.5 seconds (when using LuaJIT; When using +the lua.org interpreter, this time roughly doubles). 
Looking at +a call-stack profile of the compiler, we notice a very interesting data +point: about 11% of compiler runtime is spent in the `(type)` function. + +There are two ways to fix this: Either we introduce a type system (which +is insanely hard to do for a language as dynamic as Urn - or Lisp in +general) or we reduce the number of calls to `(type)` by means of +optimisation. Our current plan is to do the latter. + +### How + +The proposed solution is to collect all the branches that the program +has taken to end up in the state it currently is. Thus, every branch +grows the set of "constraints" - the predicates which have been invoked +to get the program here. + +Most useful predicates involve a variable: Checking if it is or isn't +nil, if is positive or negative, even or odd, a list or a string, and +etc. However, when talking about a single variable, this test only has +to be performed _once_ (in general - mutating the variable invalidates +the set of collected constraints), and their truthiness can be kept, by +the compiler, for later use. + +As an example, consider the following code. It has three branches, all +of which imply something different about the type of the variable `x`. + +```lisp +(cond + [(list? x)] ; first case + [(string? x)] ; second case + [(number? x)]) ; third case +``` + +If, in the first case, the program then evaluated `(car x)`, it'd end up +doing a redundant type check. `(car)`, is, in the standard library, +implemented like so: + +```lisp +(defun car (x) + (assert-type! x list) + (.> x 0)) +``` + +`assert-type!` is merely a macro to make checking the types of arguments +more convenient. Let's make the example of branching code a bit more +complicated by making it take and print the `car` of the list. + +```lisp +(cond + [(list? x) + (print! 
(car x))]) + ; other branches elided for clarity +``` + +To see how constraint propagation would aid the runtime performance of +this code, let's play optimiser for a bit, and see what this code would +end up looking like at each step. + +First, `(car x)` is inlined. +```lisp +(cond + [(list? x) + (print! (progn (assert-type! x list) + (.> x 0)))]) +``` + +`assert-type!` is expanded, and the problem becomes apparent: the type +of `x` is being computed _twice_! + +```lisp +(cond + [(list? x) + (print! (progn (if (! (list? x)) + (error! "the argument x is not a list")) + (.> x 0)))]) +``` + +If the compiler had constraint propagation (and the associated code +motions), this code could be simplified further. + +```lisp +(cond + [(list? x) + (print! (.> x 0))]) +``` + +Seeing as we already know that `(list? x)` is true, we don't need to +test anymore, and the conditional can be entirely eliminated. Figuring +out `(! (list? x))` from `(list? x)` is entirely trivial constant +folding (the compiler already does it) + +This code is optimal. The `(list? x)` test can't be eliminated because +nothing else is known about `x`. If its value were statically known, the +compiler could eliminate the branch and invocation of `(car x)` +completely by constant propagation and folding (`(car)` is, type +assertion notwithstanding, a pure function - it returns the same results +for the same inputs. Thus, it is safe to execute at compile time) + +### How, exactly + +In this section I'm going to outline a very simple implementation of the +constraint propagation algorithm to be employed in the Urn compiler. +It'll work on a simple Lisp with no quoting or macros (thus, basically +the lambda calculus). + +```lisp +(lambda (var1 var2) exp) ; λ-abstraction +(foo bar baz) ; procedure application +var ; variable reference +(list x y z) ; list +t, nil ; boolean +(cond [t1 b1] [t2 b2]) ; conditional +``` + +The language has very simple semantics. 
It has three kinds of values +(closures, lists and booleans), and only a couple reduction rules. The +evaluation rules are presented as an interpretation function (in Urn, +not the language itself). + +```lisp +(defun interpret (x env) + (case x + [(lambda ?params . ?body) + `(:closure ,params ,body ,(copy env))] ; 1 + [(list . ?xs) + (map (cut interpret <> env) xs)] ; 2 + [t true] [nil false] ; 3 + [(cond . ?alts) ; 4 + (interpret + (block (map (lambda (alt) + (when (interpret (car alt) env) + (break (cdr alt)))))) + env)] + [(?fn . ?args) + (case (eval fn env) + [(:closure ?params ?body ?cl-env) ; 5 + (map (lambda (a k) + (.string a) (interpret k env))) + params args) + (last (map (cut interpret <> env) body))] + [_ (error! $"not a procedure: ${fn}")])] + [else (.> env (symbol->string x))])) +``` + +1. In the case the expression currently being evaluated is a lambda, we + make a copy of the current environment and store it in a _closure_. +2. If a list is being evaluated, we recursively evaluate each + sub-expression and store all of them in a list. +3. If a boolean is being interpreted, they're mapped to the respective + values in the host language. +4. If a conditional is being evaluated, each test is performed in order, + and we abort to interpret with the corresponding body. +5. When evaluating a procedure application, the procedure to apply is + inspected: If it is a closure, we evaluate all the arguments, bind + them along with the closure environment, and interpret the body. If + not, an error is thrown. + +Collecting constraints in a language as simple as this is fairly easy, +so here's an implementation. + +```lisp +(defun collect-constraints (expr (constrs '())) + (case expr + [(lambda ?params . ?body) + `(:constraints (lambda ,params + ,@(map (cut collect-constraints <> constrs) body)) + ,constrs)] +``` + +Lambda expressions incur no additional constraints, so the inner +expressions (namely, the body) receive the old set. 
+The same is true for lists: + +```lisp + [(list . ?xs) + `(:constraints (list ,@(map (cut collect-constraints <> constrs) xs)) + ,constrs)] +``` + +Booleans are simpler: + +```lisp + [t `(:constraints ,'t ,constrs)] + [nil `(:constraints ,'nil ,constrs)] +``` + +Since there are no sub-expressions to go through, we only associate the +constraints with the boolean values. + +Conditionals are where the real work happens. For each case, we add that +case's test as a constraint in its body. + +```lisp + [(cond . ?alts) + `(:constraints + (cond + ,@(map (lambda (x) + `(,(collect-constraints (car x) constrs) + ,(collect-constraints (cadr x) (cons (car x) constrs)))) + alts)) + ,constrs)] +``` + +Applications are as simple as lists. Note that we make no distinction +between valid applications and invalid ones, and just tag both. + +```lisp + [(?fn . ?args) + `(:constraints + (,(collect-constraints fn constrs) + ,@(map (cut collect-constraints <> constrs) + args)) + ,constrs)] +``` + +References are also straightforward: + +```lisp + [else `(:constraints ,expr ,constrs)])) +``` + +That's it! Now, this information can be exploited to select a case +branch at compile time, and eliminate the overhead of performing the +test again. + +This is _really_ easy to do in a compiler that already has constant +folding of alternatives. All we have to do is associate constraints to +truthy values. For instance: + +```lisp +(defun fold-on-constraints (x) + (case x + [((:constraints ?e ?x) + :when (known? e x)) + 't] + [else x])) +``` + +That's it! We check if the expression is in the set of known +constraints, and if so, reduce it to true. Then, the constant folding +code will take care of eliminating the redundant branches. + +### When + +This is a really complicated question. The Urn core language, +unfortunately, is a tad more complicated, as is the existing optimiser. +Collecting constraints and eliminating tests would be in completely +different parts of the compiler. 
+ +There is also a series of code motions that need to be in place for +constraints to be propagated optimally, especially when panic edges are +involved. Fortunately, these are all simple to implement, but it's still +a whole lot of work. + +I don't feel confident setting a specific timeframe for this, but +I _will_ post more blags on this topic. It's fascinating (for me, at +least) and will hopefully make the compiler faster! + +[^mr]: The relevant merge request can be found +[here](https://gitlab.com/urn/urn/issues/27). + diff --git a/pages/posts/2017-08-15-multimethods.md b/pages/posts/2017-08-15-multimethods.md new file mode 100644 index 0000000..313f9bc --- /dev/null +++ b/pages/posts/2017-08-15-multimethods.md @@ -0,0 +1,280 @@ +--- +title: Multimethods in Urn +date: August 15, 2017 +--- + +`multimethod`, noun. A procedure which decides runtime behaviour based +on the types of its arguments. + +### Introduction + +At some point, most programming language designers realise that they've +outgrown the language's original feature set and must somehow expand it. +Sometimes, this expansion is painless for example, if the language had +already features in place to facilitate this, such as type classes or +message passing. + +In our case, however, we had to decide on and implement a performant +system for extensibility in the standard library, from scratch. For +a while, Urn was using Lua's scheme for modifying the behaviour of +standard library functions: metamethods in metatables. For the +uninitiated, Lua tables can have _meta_-tables attached to modify their +behaviour with respect to several language features. As an example, the +metamethod `__add`{.lua} controls how Lua will add two tables. + +However, this was not satisfactory, the most important reason as to why +being the fact that metamethods are associated with particular object +_instances_, instead of being associated with the _types_ themselves. 
+This meant that all the operations you'd like to modify had to be +modified in one big go - inside the constructor. Consider the +constructor for hash-sets as it was implemented before the addition of +multimethods. + +```lisp +(defun make-set (hash-function) + (let* [(hash (or hash-function id))] + (setmetatable + { :tag "set" + :hash hash + :data {} } + { :--pretty-print + (lambda (x) + (.. "«hash-set: " (concat (map pretty (set->list x)) " ") "»")) + :--compare #| elided for brevity |# }))) +``` + +That second table, the meta table, is entirely noise. The fact that +constructors also had to specify behaviour, instead of just data, was +annoying from a code style point of view and _terrible_ from a reuse +point of view. Behaviour is closely tied to the implementation - remember +that metamethods are tied to the _instance_. To extend the behaviour of +standard library functions (which you can't redefine) for a type you do +not control (whose constructor you also can not override), you suddenly +need to wrap the constructor and add your own metamethods. + +### Finding a Solution + +Displeased with the situation as it stood, I set out to discover what +other Lisps did, and it seemed like the consensus solution was to +implement open multimethods. And so we did. + +Multimethods - or multiple dispatch in general - is one of the best +solutions to the expression problem. We can easily add new types, and +new operations to work on existing types - and most importantly, this +means touching _no_ existing code. + +Our implementation is, like almost everything in Urn, a combination of +clever (ab)use of macros, tables and functions. A method is represented +as a table - more specifically, a n-ary tree of possible cases, with +a metamethod, `__call`{.lua}, which means multimethods can be called and +passed around like regular functions - they are first-order. 
+ +Upon calling a multimethod, it'll look up the correct method body to +call for the given arguments - or the default method, or throw an error, +if no default method is provided - and tail-call that, with all the +arguments. + +Before diving into the ridiculously simple implementation, let's look at +a handful of examples. + +#### Pretty printing + +Pretty printing is, quite possibly, the simplest application of multiple +dispatch to extensibility. As of +[`ba289d2d`](https://gitlab.com/urn/urn/commit/ba829d2de30e3b1bef4fa1a22a5e4bbdf243426b), +the standard library implementation of `pretty` is a multimethod. + +Before, the implementation[^1] would perform a series of type tests and +decide on the behaviour, including testing if the given object had +a metatable which overrides the pretty-printing behaviour. + +The new implementation is _significantly_ shorter, so much so that I'm +comfortable pasting it here. + +```lisp +(defgeneric pretty (x) + "Pretty-print a value.") +``` + +That's it! All of the logic that used to exist is now provided by the +`defgeneric` macro, and adding support for your types is as simple as +using `defmethod`.[^2] + +```lisp +(defmethod (pretty string) (x) + (format "%q" x)) +``` + +As another example, let's define - and assume the following are separate +modules - a new type, and add pretty printing support for that. + +```lisp +; Module A - A box. +(defun box (x) + { :tag "box" + :value x }) +``` + +The Urn function `type` will look for a `tag` element in tables and +report that as the type if it is present, and that function is what the +multimethod infrastructure uses to determine the correct body to call. +This means that all we need to do if we want to add support for +pretty-printing boxes is use defmethod again! + +```lisp +(defmethod (pretty box) (x) "🎁") +``` + +#### Comparison + +A more complicated application of multiple dispatch for extensibility is +the implementation of the `eq?` method in the standard library. 
+Before[^3], based on a series of conditionals, the equality test was +chosen at runtime. +Anyone with experience optimising code is wincing at the mere thought of +this code. + +The new implementation of `eq?` is also comically short - a mere 2 lines +for the definition, and only a handful of lines for all the previously +existing cases. + +```lisp +(defgeneric eq? (x y) + "Compare values for equality deeply.") + +(defmethod (eq? symbol symbol) (x y) +(= (get-idx x :contents) (get-idx y :contents))) +(defmethod (eq? string symbol) (x y) (= x (get-idx y :contents))) +(defmethod (eq? symbol string) (x y) (= (get-idx x :contents) y)) +``` + +If we would, as an example, add support for comparing boxes, the +implementation would similarly be short. + +```lisp +(defmethod (eq? box box) (x y) + (= (.> x :value) (.> y :value))) +``` + +### Implementation + +`defgeneric` and `defmethod` are, quite clearly, macros. However, +contrary to what one would expect, both their implementations are +_quite_ simple. + +```lisp +(defmacro defgeneric (name ll &attrs) + (let* [(this (gensym 'this)) + (method (gensym 'method))] + `(define ,name + ,@attrs + (setmetatable + { :lookup {} } + { :__call (lambda (,this ,@ll) + (let* [(,method (deep-get ,this :lookup ,@(map (lambda (x) + `(type ,x)) ll)))] + (unless ,method + (if (get-idx ,this :default) + (set! ,method (get-idx ,this :default)) + (error "elided for brevity"))) + (,method ,@ll))) })))) +``` + +Everything `defgeneric` has to do is define a top-level symbol to hold +the multimethod table, and generate, at compile time, a lookup function +specialised for the correct number of arguments. In a language without +macros, multimethod calls would have to - at runtime - loop over the +provided arguments, take their types, and access the correct elements in +the table. 
+
+As an example of how generating the lookup function at compile time is
+better for performance, consider the (cleaned up[^4]) lookup function
+generated for the `(eq?)` method defined above.
+
+```lua
+function(this, x, y)
+  local method
+  if this.lookup then
+    local temp1 = this.lookup[type(x)]
+    if temp1 then
+      method = temp1[type(y)] or nil
+    else
+      method = nil
+    end
+  elseif this.default then
+    method = this.default
+  end
+  if not method then
+    error("No matching method to call for...")
+  end
+  return method(x, y)
+end
+```
+
+`defmethod` and `defdefault` are very simple and uninteresting macros:
+All they do is wrap the provided body in a lambda expression along with
+the proper argument list and associate them to the correct element in
+the tree.
+
+```lisp
+(defmacro defmethod (name ll &body)
+  `(put! ,(car name) (list :lookup ,@(map s->s (cdr name)))
+    (let* [(,'myself nil)]
+      (set! ,'myself (lambda ,ll ,@body))
+      ,'myself)))
+```
+
+### Conclusion
+
+Switching to methods instead of a big if-else chain improved compiler
+performance by 12% under LuaJIT, and 2% under PUC Lua. The performance
+increase under LuaJIT can be attributed to the use of polymorphic inline
+caches to speed up dispatch, which is now just a handful of table
+accesses - Doing it with the if-else chain is _much_ harder.
+
+Defining complex multiple-dispatch methods used to be an unthinkable
+hassle what with keeping straight which cases have been defined yet and
+which cases haven't, but they're now very simple to define: Just state
+out the number of arguments and list all possible cases.
+
+The fact that multimethods are _open_ means that new cases can be added
+on the fly, at runtime (though this is not officially supported, and we
+don't claim responsibility if you shoot your own foot), and that modules
+loaded later may improve upon the behaviour of modules loaded earlier.
+This means less coupling between the standard library, which has been
+growing to be quite large.
+
+This change has, in my opinion, made Urn a lot more expressive as
+a language, and I'd like to take a minute to point out the power of the
+Lisp family in adding complicated features such as these as merely
+library code: no changes were made to the compiler, apart from a tiny
+one regarding environments in the REPL - previously, it'd use the
+compiler's version of `(pretty)` even if the user had overridden it,
+which wasn't a problem with the metatable approach, but definitely is
+with the multimethod approach.
+
+Of course, no solution is all _good_. Compiled code size has increased
+a fair bit, and for the Urn compiler to inline across multimethod
+boundaries would be incredibly difficult - These functions are
+essentially opaque boxes to the compiler.
+
+Dead code elimination is harder, what with defining functions now being
+a side-effect to be performed at runtime - Telling which method cases
+are or aren't used is incredibly difficult with the extent of the
+dynamicity.
+
+[^1]:
+[Here](https://gitlab.com/urn/urn/blob/e1e9777498e1a7d690e3b39c56f616501646b5da/lib/base.lisp#L243-270).
+Do keep in mind that the implementation is _quite_ hairy, and grew to be
+like that because of our lack of a standard way of making functions
+extensible.
+
+[^2]: `%q` is the format specifier for quoted strings.
+
+[^3]:
+[Here](https://gitlab.com/urn/urn/blob/e1e9777498e1a7d690e3b39c56f616501646b5da/lib/type.lisp#L116-1420).
+Do keep in mind that the above warnings apply to this one, too.
+
+[^4]: [The original generated code](/static/generated_code.lua.html) is
+quite similar, except the generated variable names make it a tad harder
+to read.
diff --git a/pages/posts/2017-09-08-dependent-types.md b/pages/posts/2017-09-08-dependent-types.md
new file mode 100644
index 0000000..377af1d
--- /dev/null
+++ b/pages/posts/2017-09-08-dependent-types.md
@@ -0,0 +1,516 @@
+---
+title: Dependent Types
+date: September 08, 2017
+maths: true
+---
+
+Dependent types are pretty cool, yo. This post is a semi-structured
+ramble about [dtt](https://ahti-saarelainen.zgrep.org/git/hydraz/dtt),
+a small dependently-typed "programming language" inspired by Thierry
+Coquand's Calculus of (inductive) Constructions (though, note that the
+_induction_ part is still lacking: There is support for defining
+inductive data types, and destructuring them by pattern matching, but
+since there's no totality checker, recursion is disallowed).
+
+`dtt` is written in Haskell, and served as a learning experience both in
+type theory and in writing programs using [extensible
+effects](https://hackage.haskell.org/package/freer). I *do* partly regret
+the implementation of effects I chose (the more popular
+[`extensible-effects`](https://hackage.haskell.org/package/extensible-effects)
+did not build on the Nixpkgs channel I had, so I went with `freer`;
+Refactoring between these should be easy enough, but I still haven't
+gotten around to it, yet)
+
+I originally intended for this post to be a Literate Haskell file,
+interleaving explanation with code. However, for a pet project, `dtt`'s
+code base quickly spiralled out of control, and is now over a thousand
+lines long: It's safe to say I did not expect this one bit.
+
+### The language
+
+`dtt` is a very standard $\lambda_{\prod{}}$ calculus. We have all 4 axes of
+Barendregt's lambda cube, in virtue of having types be first class
+values: Values depending on values (functions), values depending on
+types (polymorphism), types depending on types (type operators), and
+types depending on values (dependent types).
This places dtt squarely at +the top, along with other type theories such as the Calculus of +Constructions (the theoretical basis for the Coq proof assistant) and TT +(the type theory behind the Idris programming language). + +The syntax is very simple. We have the standard lambda calculus +constructs - $\lambda$-abstraction, application and variables - along +with `let`{.haskell}-bindings, pattern matching `case` expression, and +the dependent type goodies: $\prod$-abstraction and `Set`{.haskell}. + +_As an aside_, pi types are called as so because the dependent function +space may (if you follow the "types are sets of values" line of +thinking) be viewed as the cartesian product of types. Consider a type +`A`{.haskell} with inhabitants `Foo`{.haskell}, `Bar`{.haskell} and +a type `B`{.haskell} with inhabitant `Quux`{.haskell}. A dependent +product $\displaystyle\prod_{(x: \mathtt{A})}\mathtt{B}$, then, has +inhabitants `(Foo, Quux)`{.haskell} and `(Bar, Quux)`{.haskell}. + +You'll notice that dtt does not have a dedicated arrow type. Indeed, the +dependent product subsumes both the $\forall$ quantifier of System $F$, +and the arrow type $\to$ of the simply-typed lambda calculus. Keep this +in mind: It'll be important later. + +Since dtt's syntax is unified (i.e., there's no stratification of terms +and types), the language can be - and is - entirely contained in +a single algebraic data type. 
All binders are _explicitly typed_, seeing
+as inference for dependent types is undecidable (and, therefore,
+bad).[^1]
+
+```haskell
+type Type = Term
+data Term
+  = Variable Var
+  | Set Int
+  | TypeHint Term Type
+  | Pi Var Type Type
+  | Lam Var Type Term
+  | Let Var Term Term
+  | App Term Term
+  | Match Term [(Pattern, Term)]
+  deriving (Eq, Show, Ord)
+```
+
+The `TypeHint`{.haskell} term constructor, not mentioned before, is
+merely a convenience: It allows the programmer to check their
+assumptions and help the type checker by supplying a type (Note that we
+don't assume this type is correct, as you'll see later; It merely helps
+guide inference.)
+
+Variables aren't merely strings because of the large amount of
+substitutions we have to perform: For this, instead of generating a new
+name, we increment a counter attached to the variable - the pretty
+printer uses the original name to great effect, when unambiguous.
+
+```haskell
+data Var
+  = Name String
+  | Refresh String Int
+  | Irrelevant
+  deriving (Eq, Show, Ord)
+```
+
+The `Irrelevant`{.haskell} variable constructor is used to support $a
+\to b$ as sugar for $\displaystyle\prod_{(x: a)} b$ when $x$ does not
+appear free in $b$. As soon as the type checker encounters an
+`Irrelevant`{.haskell} variable, it is refreshed with a new name.
+
+`dtt` does not have implicit support (as in Idris), so all parameters,
+including type parameters, must be bound explicitly. For this, we
+support several kinds of syntactic sugar. First, all abstractions support
+multiple variables in a _binding group_. This allows the programmer to
+write `(a, b, c : α) -> β` instead of `(a : α) -> (b : α) -> (c : α) ->
+β`. Furthermore, there is special syntax `/\a` for single-parameter
+abstraction with type `Set 0`{.haskell}, and lambda abstractions support
+multiple binding groups.
+
+As mentioned before, the language does not support recursion (either
+general or well-founded).
Though I would like to, writing a totality +checker is hard - way harder than type checking $\lambda_{\prod{}}$, in +fact. However, an alternative way of inspecting inductive values _does_ +exist: eliminators. These are dependent versions of catamorphisms, and +basically encode a proof by induction. An inductive data type as Nat +gives rise to an eliminator much like it gives rise to a natural +catamorphism. + +``` +inductive Nat : Type of { + Z : Nat; + S : Nat -> Nat +} + +natElim : (P : Nat -> Type) + -> P Z + -> ((k : Nat) -> P k -> P (S k)) + -> (n : Nat) + -> P n +``` + +If you squint, you'll see that the eliminator models a proof by +induction (of the proposition $P$) on the natural number $n$: The type +signature basically states "Given a proposition $P$ on $\mathbb{N}$, +a proof of $P_0$, a proof that $P_{(k + 1)}$ follows from $P_k$ and +a natural number $n$, I'll give you a proof of $P_n$." + +This understanding of computations as proofs and types as propositions, +by the way, is called the [Curry-Howard +Isomorphism](https://en.wikipedia.org/wiki/Curry-Howard_correspondence). +The regular, simply-typed lambda calculus corresponds to natural +deduction, while $\lambda_{\prod{}}$ corresponds to predicate logic. + +### The type system + +Should this be called the term system? + +Our type inference algorithm, contrary to what you might expect for such +a complicated system, is actually quite simple. Unfortunately, the code +isn't, and thus isn't reproduced in its entirety below. + +#### Variables + +The simplest case in any type system. The typing judgement that gives +rise to this case is pretty much the identity: $\Gamma \vdash \alpha: +\tau \therefore \Gamma \vdash \alpha: \tau$. If, from the current typing +context we know that $\alpha$ has type $\tau$, then we know that +$\alpha$ has type $\tau$. + +```haskell + Variable x -> do + ty <- lookupType x -- (I) + case ty of + Just t -> pure t -- (II) + Nothing -> throwError (NotFound x) -- (III) +``` + +1. 
Look up the type of the variable in the current context.
+2. If we found a type for it, then return that (this is the happy path)
+3. If we didn't find a type for it, we raise a type error.
+
+#### `Set`{.haskell}s
+
+Since dtt has a cumulative hierarchy of universes, $\mathtt{Set}_k:
+\mathtt{Set}_{(k + 1)}$. This helps us avoid the logical inconsistency
+introduced by having _type-in-type_[^2], i.e. $\mathtt{Type}:
+\mathtt{Type}$. We say that $\mathtt{Set}_0$ is the type of _small
+types_: in fact, $\mathtt{Set}_0$ is where most computation actually
+happens, seeing as $\mathtt{Set}_k$ for $k \ge 1$ is reserved for
+$\prod$-abstractions quantifying over such types.
+
+```haskell
+  Set k -> pure . Set . (+1) $ k
+```
+
+#### Type hints
+
+Type hints are the first appearance of the unification engine, by far
+the most complex part of dtt's type checker. But for now, suffices to
+know that ``t1 `assertEquality` t2``{.haskell} errors if the types t1
+and t2 can't be made to _line up_, i.e., unify.
+
+For type hints, we infer the type of given expression, and compare it
+against the user-provided type, raising an error if they don't match.
+Because of how the unification engine works, the given type may be more
+general (or specific) than the inferred one.
+
+```haskell
+  TypeHint v t -> do
+    it <- infer v
+    t `assertEquality` it
+    pure t
+```
+
+#### $\prod$-abstractions
+
+This is where it starts to get interesting. First, we mandate that the
+parameter type is inhabited (basically, that it _is_, in fact, a type).
+The dependent product $\displaystyle\prod_{(x : 0)} \alpha$, while allowed by the
+language's grammar, is entirely meaningless: There's no way to construct
+an inhabitant of $0$, and thus this function may never be applied.
+
+Then, in the context extended with $(\alpha : \tau)$, we require that
+the consequent is also a type itself: The function
+$\displaystyle\prod_{(x: \mathbb{N})} 0$, while again a valid parse, is
+also meaningless.
+
+The type of the overall abstraction is, then, the maximum value of the
+indices of the universes of the parameter and the consequent.
+
+```haskell
+  Pi x p c -> do
+    k1 <- inferSet p
+    k2 <- local (insertType (x, p)) $
+      inferSet c
+    pure $ Set (k1 `max` k2)
+```
+
+#### $\lambda$-abstractions
+
+Much like in the simply-typed lambda calculus, the type of
+a $\lambda$-abstraction is an arrow between the type of its parameter
+and the type of its body. Of course, $\lambda_{\prod{}}$ incurs the
+additional constraint that the type of the parameter is inhabited.
+
+Alas, we don't have arrows. So, we "lift" the lambda's parameter to the
+type level, and bind it in a $\prod$-abstraction.
+
+```haskell
+  Lam x t b -> do
+    _ <- inferSet t
+    Pi x t <$> local (insertType (x, t)) (infer b)
+```
+
+Note that, much like in the `Pi`{.haskell} case, we type-check the body
+in a context extended with the parameter's type.
+
+#### Application
+
+Application is the most interesting rule, as it has to not only handle
+inference, it also has to handle instantiation of $\prod$-abstractions.
+
+Instantiation is, much like application, handled by $\beta$-reduction,
+with the difference being that instantiation happens during type
+checking (applying a $\prod$-abstraction is meaningless) and application
+happens during normalisation (instancing a $\lambda$-abstraction is
+meaningless).
+
+The type of the function being applied needs to be
+a $\prod$-abstraction, while the type of the operand needs to be
+inhabited. Note that the second constraint is not written out
+explicitly: It's handled by the `Pi`{.haskell} case above, and
+furthermore by the unification engine.
+
+```haskell
+  App e1 e2 -> do
+    t1 <- infer e1
+    case t1 of
+      Pi vr i o -> do
+        t2 <- infer e2
+        t2 `assertEquality` i
+        N.normalise =<< subst [(vr, e2)] o -- (I)
+      e -> throwError (ExpectedPi e) -- (II)
+```
Notice that, here, we don't substitute the $\prod$-bound variable by
+   the type of $e_2$: That'd make us equivalent to System $F$. The whole
+   _deal_ with dependent types is that types depend on values, and that
+   entirely stems from this one line. By instancing a type variable with
+   a value, we allow _types_ to depend on _values_.
+
+2. Oh, and if we didn't get a $\prod$-abstraction, error.
+
+---
+
+You'll notice that two typing rules are missing here: One for handling
+`let`{.haskell}s, which was not included because it is entirely
+uninteresting, and one for `case ... of`{.haskell} expressions, which
+was redacted because it is entirely a mess.
+
+Hopefully, in the future, the typing of `case` expressions is simpler
+- if not, they'll probably be replaced by eliminators.
+
+### Unification and Constraint Solving
+
+The unification engine is the man behind the curtain in type checking:
+We often don't pay attention to it, but it's the driving force behind it
+all. Fortunately, in our case, unification is entirely trivial: Solving
+is the hard bit.
+
+The job of the unification engine is to produce a set of constraints
+that have to be satisfied in order for two types to be equal. Then, the
+solver is run on these constraints to assert that they are logically
+consistent, and potentially produce substitutions that _reify_ those
+constraints.
+Our solver isn't that cool, though, so it just verifies consistency.
+
+The kinds of constraints we can generate are as in the data type below.
+
+```haskell
+data Constraint
+  = Instance Var Term -- (1)
+  | Equal Term Term -- (2)
+  | EqualTypes Type Type -- (3)
+  | IsSet Type -- (4)
+  deriving (Eq, Show, Ord)
+```
+
+1. The constraint `Instance v t`{.haskell} corresponds to a substitution
+   between `v` and the term `t`.
+2. A constraint `Equal a b`{.haskell} states that the two terms `a` and
+   `b` are equal under normalisation.
+3. Ditto, but with their _types_ (We normalise, infer, and check for
+   equality)
+4. 
A constraint `IsSet t`{.haskell} asserts that the provided type has + inhabitants. + +#### Unification + +Unification of most terms is entirely uninteresting. Simply line up the +structures and produce the appropriate equality (or instance) +constraints. + +```haskell +unify (Variable a) b = instanceC a b +unify b (Variable a) = instanceC a b +unify (Set a) (Set b) | a == b = pure [] +unify (App x y) (App x' y') = + (++) <$> unify x x' <*> unify y y' +unify (TypeHint a b) (TypeHint c d) = + (++) <$> unify a c <*> unify b d +unify a b = throwError (NotEqual a b) +``` + +Those are all the boring cases, and I'm not going to comment on them. +Similarly boring are binders, which were abstracted out because hlint +told me to. + +```haskell +unify (Lam v1 t1 b1) (Lam v2 t2 b2) = unifyBinder (v1, v2) (t1, t2) (b1, b2) +unify (Pi v1 t1 b1) (Pi v2 t2 b2) = unifyBinder (v1, v2) (t1, t2) (b1, b2) +unify (Let v1 t1 b1) (Let v2 t2 b2) = unifyBinder (v1, v2) (t1, t2) (b1, b2) +unifyBinder (v1, v2) (t1, t2) (b1, b2) = do + (a, b) <- (,) <$> unify (Variable v1) (Variable v2) <*> unify t1 t2 + ((a ++ b) ++) <$> unify b1 b2 +``` + +There are two interesting cases: Unification between some term and a pi +abstraction, and unification between two variables. + +```haskell +unify ta@(Variable a) tb@(Variable b) + | a == b = pure [] + | otherwise = do + (x, y) <- (,) <$> lookupType a <*> lookupType b + case (x, y) of + (Just _, Just _) -> do + ca <- equalTypesC ta tb + cb <- equalC ta tb + pure (ca ++ cb) + (Just x', Nothing) -> instanceC b x' + (Nothing, Just x') -> instanceC a x' + (Nothing, Nothing) -> instanceC a (Variable b) +``` + +If the variables are syntactically the same, then we're done, and no +constraints have to be generated (Technically you could generate an +entirely trivial equality constraint, but this puts unnecessary pressure +on the solver). + +If either variable has a known type, then we generate an instance +constraint between the unknown variable and the known one. 
+
+If both variables have a value, we equate their types' types and their
+types. This is done mostly for error messages' sakes, seeing as if two
+values are propositionally equal, so are their types.
+
+Unification between a term and a $\prod$-abstraction is the most
+interesting case: We check that the $\prod$ type abstracts over a type
+(i.e., it corresponds to a System F $\forall$ instead of a System
+F $\to$), and _instance_ the $\prod$ with a fresh type variable.
+
+```haskell
+unifyPi v1 t1 b1 a = do
+  id <- refresh Irrelevant
+  ss <- isSetC t1
+  pi' <- subst [(v1, Variable id)] b1
+  (++ ss) <$> unify a pi'
+
+unify a (Pi v1 t1 b1) = unifyPi v1 t1 b1 a
+unify (Pi v1 t1 b1) a = unifyPi v1 t1 b1 a
+```
+
+#### Solving
+
+Solving is a recursive function of the list of constraints (a
+catamorphism!) with some additional state: Namely, a strict map of
+already-performed substitutions. Let's work through the cases in reverse
+order of complexity (and, interestingly, reverse order of how they're in
+the source code).
+
+##### No constraints
+
+Solving an empty list of constraints is entirely trivial.
+
+```haskell
+solveInner _ [] = pure ()
+```
+
+#### `IsSet`{.haskell}
+
+We infer the index of the universe of the given type, much like in the
+inference case for $\prod$-abstractions, and check the remaining
+constraints.
+
+```haskell
+solveInner map (IsSet t:xs) = do
+  _ <- inferSet t
+  solveInner map xs
+```
+
+#### `EqualTypes`{.haskell}
+
+We infer the types of both provided values, and generate an equality
+constraint.
+
+```haskell
+solveInner map (EqualTypes a b:xs) = do
+  ta <- infer a
+  tb <- infer b
+  solveInner map (Equal ta tb:xs)
+```
+
+#### `Equal`{.haskell}
+
+We merely have to check for syntactic equality of the (normal forms of)
+terms, because the hard lifting of destructuring and lining up was done
+by the unification engine.
+ +```haskell +solveInner map (Equal a b:xs) = do + a' <- N.normalise a + b' <- N.normalise b + eq <- equal a' b' + if eq + then solveInner map xs + else throwError (NotEqual a b) +``` + +#### `Instance`{.haskell} + +If the variable we're instancing is already in the map, and the thing +we're instancing it to _now_ is not the same as before, we have an +inconsistent set of substitutions and must error. + +```haskell +solveInner map (Instance a b:xs) + | a `M.member` map + , b /= map M.! a + , Irrelevant /= a + = throwError $ InconsistentSubsts (a, b) (map M.! a) +``` + +Otherwise, if we have a coherent set of instances, we add the instance +both to scope and to our local state map and continue checking. + +```haskell + | otherwise = + local (insertType (a, b)) $ + solveInner (M.insert a b map) xs +``` + +--- + +Now that we have both `unify` and `solve`, we can write +`assertEquality`: We unify the two types, and then try to solve the set +of constraints. + +```haskell +assertEquality t1 t2 = do + cs <- unify t1 t2 + solve cs +``` + +The real implementation will catch and re-throw any errors raised by +`solve` to add appropriate context, and that's not the only case where +"real implementation" and "blag implementation" differ. + +### Conclusion + +Wow, that was a lot of writing. This conclusion begins on exactly the +500th line of the Markdown source of this article, and this is the +longest article on this blag (by far). However, that's not to say it's +bad: It was amazing to write, and writing `dtt` was also amazing. I am +not good at conclusions. + +`dtt` is available under the BSD 3-clause licence, though I must warn +you that the source code hasn't many comments. + +I hope you learned nearly as much as I did writing this by reading it. + +[^1]: As [proven](https://link.springer.com/chapter/10.1007/BFb0037103) by Gilles Dowek. 
+[^2]: See [System U](https://en.wikipedia.org/wiki/System_U), also +Girard's paradox - the type theory equivalent of [Russell's +paradox](https://en.wikipedia.org/wiki/Russell%27s_paradox). diff --git a/pages/posts/2018-01-18-amulet.md b/pages/posts/2018-01-18-amulet.md new file mode 100644 index 0000000..9fae97f --- /dev/null +++ b/pages/posts/2018-01-18-amulet.md @@ -0,0 +1,456 @@ +--- +title: The Amulet Programming Language +date: January 18, 2018 +--- + +As you might have noticed, I like designing and implementing programming +languages. This is another of these projects. Amulet is a +strictly-evaluated, statically typed impure roughly functional +programming language with support for parametric data types and rank-1 +polymorphism _à la_ Hindley-Milner (but [no +let-generalization](#letgen)), along with row-polymorphic records. While +syntactically inspired by the ML family, it's a disservice to those +languages to group Amulet with them, mostly because of the (present) +lack of modules. + +Planned features (that I haven't even started working on, as of writing +this post) include generalized algebraic data types, modules and modular +implicits, a reworked type inference engine based on _OutsideIn(X)_[^4] +to support the other features, and, perhaps most importantly, a back-end +that's not a placeholder (i.e. something that generates either C or LLVM +and can be compiled to a standalone executable). + +The compiler is still very much a work in progress, and is actively +being improved in several ways: Rewriting the parser for efficiency +concerns (see [Lexing and Parsing](#parser)), improving the quality of +generated code by introducing more intermediate representations, and +introducing several optimisations on the one intermediate language we +_do_ have. + +## The Technical Bits + +In this section, I'm going to describe the implementation of the +compiler as it exists at the time of writing - warts and all. 
+Unfortunately, we have a bit too much code for all of it to fit in this +blag post, so I'm only going to include the horribly broken bits here, +and leave the rest out. Of course, the compiler is open source, and is +available on my [GitHub][2]. + +### Lexing and Parsing {#parser} + +To call what we have a _lexer_ is a bit of an overstatement: The +`Parser.Lexer` module, which underpins the actual parser, contains only +a handful of imports and some definitions for use with [Parsec's][3] +[`Text.Parsec.Token`][4] module; Everything else is boilerplate, namely, +declaring, at top-level, the functions generated by `makeTokenParser`. + +Our parser is then built on top of this infrastructure (and the other +combinators provided by Parsec) in a monadic style. Despite having +chosen to use strict `Text`s, many of the Parsec combinators return +`Char`s, and using the Alternative type class' ability to repeat actions +makes linked lists of these - the dreaded `String` type. Due to this, +and other inefficiencies, the parser is ridiculously bad at memory +management. + +However, it does have some cute hacks. For example, the pattern parser +has to account for being used in the parsing of both `match`{.ml} and +`fun`{.ml} - in the former, destructuring patterns may appear without +parenthesis, but in the latter, they _must_ be properly parenthesised: +since `fun`{.ml} may have multiple patterns, it would be ambiguous if +`fun Foo x -> ...`{.ml} is destructuring a `Foo` or takes two arguments. + +Instead of duplicating the pattern parser, one for `match`{.ml}es and +one for function arguments, we instead _parametrised_ the parser over +needing parenthesis or not by adding a rank-2 polymorphic continuation +argument. + +```haskell +patternP :: (forall a. Parser a -> Parser a) -> Parser Pattern' +patternP cont = wildcard <|> {- some bits omitted -} try destructure where + destructure = withPos . 
cont $ do + ps <- constrName + Destructure ps <$> optionMaybe (patternP id) +``` + +When we're parsing a pattern `match`{.ml}-style, the continuation given +is `id`, and when we're parsing an argument, the continuation is +`parens`. + +For the aforementioned efficiency concerns, however, we've decided to +scrap the Parsec-based parser and move to an Alex/Happy based solution, +which is not only going to be more maintainable and more easily hackable +in the future, but will also be more efficient overall. Of course, for +a toy compiler such as this one, efficiency doesn't matter that much, +but using _one and a half gigabytes_ to compile a 20-line file is really +bad. + +### Renaming {#renamer} + +To simplify scope handling in both the type checker and optimiser, after +parsing, each variable is tagged with a globally unique integer that is +enough to compare variables. This also lets us use more efficient data +structures later in the compiler, such as `VarSet`, which stores only the +integer identifier of a variable in a big-endian Patricia tree[^1]. + +Our approach, described in _[Secrets of the Glasgow Haskell Compiler +inliner][5]_ as "the Sledgehammer", consists of duplicating _every_ +bound variable to avoid name capture problems. However, while the first +of the listed disadvantages surely does apply, by doing all of the +_renaming_ in one go, we mostly avoid the latter. Of course, since then, +the Haskell ecosystem has evolved significantly, and the plumbing +required is a lot less intrusive. + +In our compiler, we use MTL-style classes instead of concrete monad +transformer stacks. We also run every phase after parsing in a single +`GenT`{.haskell} monad, which provides a fresh supply of integers for +names. "Plumbing" the fresh name supply, then, only involves adding a +`MonadGen Int m` constraint to the context of functions that need it. + +Since the string component of parsed names is not thrown away, we also +have to make up strings themselves. 
This is where another cute hack +comes in: We generate, lazily, an infinite stream of names that goes +`["a" .. "z", "aa" .. "az", "ba" .. "bz", ..]`, then use the +`MonadGen`{.haskell} counter as an index into that stream. + +```haskell +alpha :: [Text] +alpha = map T.pack $ [1..] >>= flip replicateM ['a'..'z'] +``` + +### Desugaring + +The desugarer is a very simple piece of code which, through use of _Scrap +Your Boilerplate_-style generic programming, traverses the syntax tree +and rewrites nodes representing syntax sugar to their more explicit +versions. + +Currently, the desugarer only expands _sections_: That is, expressions +of the form `(+ e)` become `fun x -> x + e` (where `e` is a fresh name), +expressions like `(e +)` become `fun x -> e + x`, and expressions like +`.foo` becomes `fun x -> x.foo`. + +This is the only component of the compiler that I can reasonably +include, in its entirety, in this post. + +```haskell +desugarProgram = everywhereM (mkM defaults) where + defaults :: Expr Parsed -> m (Expr Parsed) + defaults (BothSection op an) = do + (ap, ar) <- fresh an + (bp, br) <- fresh an + pure (Fun ap (Fun bp (BinOp ar op br an) an) an) + defaults (LeftSection op vl an) = do + (cap, ref) <- fresh an + pure (Fun cap (BinOp ref op vl an) an) + defaults (RightSection op vl an) = do + (cap, ref) <- fresh an + pure (Fun cap (BinOp vl op ref an) an) + defaults (AccessSection key an) = do + (cap, ref) <- fresh an + pure (Fun cap (Access ref key an) an) + defaults x = pure x +``` + +### Type Checking + +By far the most complicated stage of the compiler pipeline, our +inference algorithm is modelled after Algorithm W (extended with kinds +and kind inference), with constraint generation and solving being two +separate steps. 
+ +We first traverse the syntax tree, in order, making up constraints and +fresh type variables as needed, then invoke a unification algorithm to +produce a substitution, then apply that over both the generated type (a +skeleton of the actual result) and the syntax tree (which is explicitly +annotated with types everywhere). + +The type inference code also generates and inserts explicit type +applications when instancing polymorphic types, since we internally +lower Amulet into a System F core language with explicit type +abstraction and application. We have `TypeApp` nodes in the syntax tree +that never get parsed or renamed, and are generated by the type checker +before lowering happens. + +Our constraint solver is quite rudimentary, but it does the job nicely. +We operate with a State monad with the current substitution. When we +unify a variable with another type, it is added to the current +substitution. Everything else is just zipping the types together. When +we try to unify, say, a function type with a constructor, that's an +error. If a variable has already been added to the current substitution and +encounter it again, the new type is unified with the previously recorded +one. + +```haskell +unify :: Type Typed -> Type Typed -> SolveM () +unify (TyVar a) b = bind a b +unify a (TyVar b) = bind b a +unify (TyArr a b) (TyArr a' b') = unify a a' *> unify b b' +unify (TyApp a b) (TyApp a' b') = unify a a' *> unify b b' +unify ta@(TyCon a) tb@(TyCon b) + | a == b = pure () + | otherwise = throwError (NotEqual ta tb) +``` + +This is only an excerpt, because we have very complicated types. + +#### Polymorphic Records + +One of Amulet's selling points (if one could call it that) is its support +for row-polymorphic records. We have two types of first-class record +types: _closed_ record types (the type of literals) and _open_ record +types (the type inferred by record patterns and field getters.). Open +record types have the shape `{ 'p | x_n : t_n ... 
x_n : t_n }`{.ml},
+while closed records lack the type variable `'p`{.ml}.
+
+Unification of records has 3 cases, but in all 3 cases it is checked that
+fields present in both records have unifiable types.
+
+- When unifying an open record with a closed one, the fields present in
+both records must have unifiable types, and the type variable is
+instanced to contain the extra fields.
+- When unifying two closed records, they must have exactly the same
+shape and unifiable types for common fields.
+- When unifying two open record types, a new fresh type variable is
+created to use as the "hole" and tack the fields together.
+
+As an example, `{ x = 1 }` has type `{ x : int }`{.ml}, the function
+`fun x -> x.foo` has type `{ 'p | foo : 'a } -> 'a`{.ml}, and
+`(fun r -> r.x) { y = 2 }` is a type error[^2].
+
+#### No Let Generalisation {#letgen}
+
+Vytiniotis, Peyton Jones and Schrijvers argue[^5] that HM-style
+`let`{.ml} generalisation interacts badly with complex type system
+extensions such as GADTs and type families, and should therefore be
+omitted from such systems. In a deviation from the paper, GHC 7.2
+reintroduces `let`{.ml} generalisation for local definitions that meet
+some criteria[^3].
+
+> Here's the rule. With `-XMonoLocalBinds` (the default), a binding
+> without a type signature is **generalised only if all its free variables
+> are closed.**
+>
+> A binding is **closed** if and only if
+>
+> - It has a type signature, and the type signature has no free variables; or
+> - It has no type signature, and all its free variables are closed, and it
+is unaffected by the monomorphism restriction. And hence it is fully
+generalised.
+
+We, however, have chosen to follow that paper to a tee. Despite not
+(yet!) having any of those fancy type system features that interact
+poorly with let generalisation, we do not generalise _any_ local
+bindings. 
+ + +### Lowering + +After type checking is done (and, conveniently, type applications have +been left in the correct places for us by the type checker), Amulet code +is converted into an explicitly-typed intermediate representation, in +direct style, which is used for (local) program optimisation. The AST is +simplified considerably: from 19 constructors to 9. + +Type inference is no longer needed: the representation of core is packed +with all the information we need to check that programs are +type-correct. This includes types in every binder (lambda abstractions, +`let`{.ml}s, pattern bindings in `match`{.ml}), big-lambda abstractions +around polymorphic values (a $\lambda$ binds a value, while a $\Lambda$ +binds a type), along with the already mentioned type applications. + +Here, code also gets the error branches for non-exhaustive `match`{.ml} +expressions, and, as a general rule, gets a lot uglier. + +```ocaml +let main _ = (fun r -> r.x) { x = 2 } + +(* Is elaborated into *) + +let main : ∀ 'e. 'e -> int = + Λe : *. λk : 'e. match k { + (p : 'e) : 'e -> (λl : { 'g | x : int }. match l { + (r : { 'g | x : int }) : { 'g | x : int } -> match r { + { (n : { 'g | x : int }) | x = (m : int) } : { 'g | x : int } -> m + }; + (o : { 'g | x : int }) : { 'g | x : int } -> + error @int "[1:15 .. 1:27]" + }) ({ {} | x : int = 2 }); + (q : 'e) : 'e -> error @int "[1:14 .. 1:38]" + } +``` + +### Optimisation + +As the code we initially get from lowering is ugly and inefficient - +along with being full of the abstractions functional programs have by +nature, it is full of redundant matches created by e.g. the fact that +functions can not do pattern matching directly, and that field access +gets reduced to pattern matching - the optimiser's job is to make it +prettier, and more efficient. + +The optimiser works by applying, in order, a series of local +transformations operating on individual sub-terms to produce an efficient +program, 25 times. 
The idea of applying them several times is that, when +a simplification pass kicks in, more simplification opportunities might +arise. + +#### `dropBranches`, `foldExpr`, `dropUselessLets` + +These trivial passes remove similarly trivial pieces of code that only +add noise to the program. `dropBranches` will do its best to remove +redundant arms from a `match`{.ml} expression, such as those that +appear after an irrefutable pattern. `foldExpr` reduces uses of +operators where both sides are known, e.g. `2 + 2` (replaced by the +literal `5`) or `"foo " ^ "bar"` (replaced by the literal `"foo +bar"`). `dropUselessLets` removes `let`{.ml}s that bind unused variables +whose right-hand sides are pure expressions. + +#### `trivialPropag`, `constrPropag` + +The Amulet optimiser does inlining decisions in two (well, three) +separate phases: One is called _propagation_, in which a `let` decides +to propagate its bound values into the expression, and the other is the +more traditional `inlining`, where variables get their values from the +context. + +Propagation is by far the easiest of the two: The compiler can see both +the definitions and all of the use sites, and could in theory decide if +propagating is beneficial or not. Right now, we propagate all literals +(and records made up solely of other trivial expressions), and do a +round of propagation that is best described as a rule. + +```ocaml +let { v = C e } in ... v ... +(* becomes *) +let { v' = e } in ... C v' ... +``` + +This _constructor propagation_ allows the `match`{.ml} optimisations to kick +in more often, and is semantics preserving. + +#### `match`{.ml}-of-known-constructor + +This pass identifies `match`{.ml} expressions where we can statically +determine the expression being analysed and, therefore, decide which +branch is going to be taken. + +```ocaml +match C x with +| C e -> ... e ... +... +(* becomes *) +... x ... 
+``` + +#### `match`{.ml}-of-bottom + +It is always safe to turn a `match`{.ml} where the term being matched is a +diverging expression into only that diverging expression, thus reducing +code size several times. + +```ocaml +match (error @int "message") with ... +(* becomes *) +error @int "message" +``` + +As a special case, when one of the arms is itself a diverging +expression, we use the type mentioned in that application to `error` to +fix up the type of the value being scrutinized. + +```ocaml +match (error @foo "message") with +| _ -> error @bar "message 2" +... +(* becomes *) +error @bar "message" +``` + +#### `match`{.ml}-of-`match`{.ml} + +This transformation turns `match`{.ml} expressions where the expression +being dissected is itself another `match`{.ml} "inside-out": we push the +branches of the _outer_ `match`{.ml} "into" the _inner_ `match` (what +used to be the expression being scrutinized). In doing so, sometimes, +new opportunities for match-of-known-constructor arise, and the code +ends up simpler. + +```ocaml +match (match x with + | A -> B + | C -> D) with + | B -> e + | D -> f +(* becomes *) +match x with + | A -> match B with + | B -> e + | D -> f + | C -> match D with + | B -> e + | D -> f +``` + +A clear area of improvement here is extracting the outer branches into +local `let`{.ml}-bound lambda abstractions to avoid an explosion in code +size. + +#### `inlineVariable`, `betaReduce` + +In this pass, use of a variable is replaced with the definition of that +variable, if it meets the following conditions: + +- The variable is a lambda abstraction; and +- The lambda abstraction's body is not too _expensive_. Computing the +cost of a term boils down to computing the depth of the tree +representing that term, with some extra cost added to some specific +types of expression. + +In doing this, however, we end up with pathological terms of the form +`(fun x -> e) y`{.ml}. The `betaReduce` pass turns this into `let x = y in +e`{.ml}. 
We generate `let`{.ml} bindings instead of substituting the +variable with the parameter to maintain the same evaluation order and +observable effects of the original code. This does mean that, often, +propagation kicks in and gives rise to new simplification opportunities. + +## Epilogue + +I was planning to write a section with a formalisation of the language's +semantics and type system, but it turns out I'm no mathematician, no +matter how hard I pretend. Maybe in the future. + +Our code generator is wholly uninteresting, and, most of all, a +placeholder: This is why it is not described in detail (that is, at all) +in this post. I plan to write a follow-up when we actually finish the +native code generator. + +As previously mentioned, the compiler _is_ open source: the code is +[here][2]. I recommend using the [Nix package manager][9] to acquire the +Haskell dependencies, but Cabal should work too. Current work in +rewriting the parser is happening in the `feature/alex-happy` branch. + +[^1]: This sounds fancy, but in practice, it boils down to using + `Data.IntSet`{.haskell} instead of `Data.Set`{.haskell}. + +[^2]: As shown [here][6]. Yes, the error messages need improvement. + +[^3]: As explained in [this blog post][8]. + +[^4]: Dimitrios Vytiniotis, Simon Peyton Jones, Tom Schrijvers, + and Martin Sulzmann. 2011. [OutsideIn(X): Modular Type Inference With + Local Assumptions][1]. _Note that, although the paper has been + published in the Journal of Functional Programming, the version linked + to here is a preprint._ + +[^5]: Dimitrios Vytiniotis, Simon Peyton Jones, Tom Schrijvers. 2010. + [Let Should not be Generalised][7]. 
+ +[1]: +[2]: +[3]: +[4]: +[5]: +[6]: +[7]: +[8]: +[9]: diff --git a/pages/posts/2018-02-18-amulet-tc2.md b/pages/posts/2018-02-18-amulet-tc2.md new file mode 100644 index 0000000..baceb13 --- /dev/null +++ b/pages/posts/2018-02-18-amulet-tc2.md @@ -0,0 +1,610 @@ +--- +title: Amulet's New Type Checker +date: February 18, 2018 +synopsys: 2 +--- + +In the last post about Amulet I wrote about rewriting the type checking +code. And, to everybody's surprise (including myself), I actually did +it. + +Like all good programming languages, Amulet has a strong, static type +system. What most other languages do not have, however, is (mostly) +_full type inference_: programs are still type-checked despite (mostly) +having no type annotations. + +Unfortunately, no practical type system has truly "full type inference": +features like data-type declarations, integral to actually writing +software, mandate some type annotations (in this case, constructor +arguments). However, that doesn't mean we can't try. + +The new type checker, based on a constraint-generating but +_bidirectional_ approach, can type a lot more programs than the older, +Algorithm W-derived, quite buggy checker. As an example, consider the +following definition. For this to check under the old type system, one +would need to annotate both arguments to `map` _and_ its return type - +clearly undesirable! + +```ocaml +let map f = + let go cont xs = + match xs with + | Nil -> cont Nil + | Cons (h, t) -> go (compose cont (fun x -> Cons (f h, x))) t + in go id ;; +``` + +Even more egregious is that the η-reduction of `map` would lead to an +ill-typed program. + +```ocaml +let map f xs = + let go cont xs = (* elided *) + in go id xs ;; +(* map : forall 'a 'b. ('a -> 'b) -> list 'a -> list 'b *) + +let map' f = + let go cont xs = (* elided *) + in go id ;; +(* map' : forall 'a 'b 'c. 
('a -> 'b) -> list 'a -> list 'c *) +``` + +Having declared this unacceptable, I set out to rewrite the type +checker, after months of procrastination. As is the case, of course, +with such things, it only took some two hours, and I really shouldn't have +procrastinated it for so long. + +Perhaps more importantly, the new type checker also supports rank-N +polymorphism directly, with all appropriate checks in place: expressions +checked against a polymorphic type are, in reality, checked against a +_deeply skolemised_ version of that poly-type - this lets us enforce two +key properties: + +1. the expression being checked _is_ actually parametric over the type +arguments, i.e., it can't unify the skolem constants with any type +constructors, and +2. no rank-N arguments escape. + +As an example, consider the following function: + +```ocaml +let rankn (f : forall 'a. 'a -> 'a) = f () +``` + +Well-typed uses of this function are limited to applying it to the +identity function, as parametricity tells us; and, indeed, trying to +apply it to e.g. `fun x -> x + 1`{.ocaml} is a type error. + +### The Solver + +As before, type checking is done by a traversal of the syntax tree +which, by use of a `Writer`{.haskell} monad, produces a list of +constraints to be solved. Note that a _list_ really is needed: a set, or +similar data structure with unspecified order, will not do. The order in +which the solver processes constraints is important! + +The support for rank-N types has lead to the solver needing to know +about a new kind of constraint: _subsumption_ constraints, in addition +to _unification_ constraints. Subsumption is perhaps too fancy a term, +used to obscure what's really going on: subtyping. However, whilst +languages like Java and Scala introduce subtyping by means of +inheritance, our subtyping boils down to eliminating ∀s. 
+ +∀s are eliminated from the right-hand-side of subsumption constraints by +_deep skolemisation_: replacing the quantified variables in the type +with fresh type constants. The "depth" of skolemisation refers to the +fact that ∀s to the right of arrows are eliminated along with the ones +at top-level. + +```haskell +subsumes k t1 t2@TyForall{} = do + t2' <- skolemise t2 + subsumes k t1 t2' +subsumes k t1@TyForall{} t2 = do + (_, _, t1') <- instantiate t1 + subsumes k t1' t2 +subsumes k a b = k a b +``` + +The function for computing subtyping is parametric over what to do in +the case of two monomorphic types: when this function is actually used +by the solving algorithm, it's applied to `unify`. + +The unifier has the job of traversing two types in tandem to find the +_most general unifier_: a substitution that, when applied to one type, +will make it syntatically equal to the other. In most of the type +checker, when two types need to be "equal", they're equal up to +unification. + +Most of the cases are an entirely boring traversal, so here are the +interesting ones. + +- Skolem type constants only unify with other skolem type constants: +```haskell +unify TySkol{} TySkol{} = pure () +unify t@TySkol{} b = throwError $ SkolBinding t b +unify b t@TySkol{} = throwError $ SkolBinding t b +``` + +- Type variables extend the substitution: +```haskell +unify (TyVar a) b = bind a b +unify a (TyVar b) = bind b a +``` + +- Polymorphic types unify up to α-renaming: +```haskell +unify t@(TyForall vs ty) t'@(TyForall vs' ty') + | length vs /= length vs' = throwError (NotEqual t t') + | otherwise = do + fvs <- replicateM (length vs) freshTV + let subst = Map.fromList . flip zip fvs + unify (apply (subst vs) ty) (apply (subst vs') ty') +``` + +When binding a variable to a concrete type, an _occurs check_ is +performed to make sure the substitution isn't going to end up containing +an infinite type. 
Consider binding `'a := list 'a`: If `'a` is +substituted for `list 'a` everywhere, the result would be `list (list +'a)` - but wait, `'a` appears there, so it'd be substituted again, ad +infinitum. + +Extra care is also needed when binding a variable to itself, as is the +case with `'a ~ 'a`. These constraints are trivially discharged, but +adding them to the substitution would mean an infinite loop! + +```haskell +occurs :: Var Typed -> Type Typed -> Bool +occurs _ (TyVar _) = False +occurs x e = x `Set.member` ftv e +``` + +If the variable has already been bound, the new type is unified with the +one present in the substitution being accumulated. Otherwise, it is +added to the substitution. + +```haskell +bind :: Var Typed -> Type Typed -> SolveM () +bind var ty + | occurs var ty = throwError (Occurs var ty) + | TyVar var == ty = pure () + | otherwise = do + env <- get + -- Attempt to extend the environment, otherwise + -- unify with existing type + case Map.lookup var env of + Nothing -> put (Map.singleton var (normType ty) `compose` env) + Just ty' + | ty' == ty -> pure () + | otherwise -> unify (normType ty) (normType ty') +``` + +Running the solver, then, amounts to folding through the constraints in +order, applying the substitution created at each step to the remaining +constraints while also accumulating it to end up at the most general +unifier. 
+ +```haskell +solve :: Int -> Subst Typed + -> [Constraint Typed] + -> Either TypeError (Subst Typed) +solve _ s [] = pure s +solve i s (ConUnify e a t:xs) = do + case runSolve i s (unify (normType a) (normType t)) of + Left err -> Left (ArisingFrom err e) + Right (i', s') -> solve i' (s' `compose` s) (apply s' xs) +solve i s (ConSubsume e a b:xs) = + case runSolve i s (subsumes unify (normType a) (normType b)) of + Left err -> Left (ArisingFrom err e) + Right (i', s') -> solve i' (s' `compose` s) (apply s' xs) +``` + +### Inferring and Checking Patterns + +Amulet, being a member of the ML family, does most data processing +through _pattern matching_, and so, the patterns also need to be type +checked. + +The pattern grammar is simple: it's made up of 6 constructors, while +expressions are described by over twenty constructors. + +Here, the bidirectional approach to inference starts to shine. It is +possible to have different behaviours for when the type of the +pattern (or, at least, some skeleton describing that type) is known +and for when it is not, and such a type must be produced from the +pattern alone. + +In an unification-based system like ours, the inference judgement can be +recovered from the checking judgement by checking against a fresh type +variable. + +```haskell +inferPattern p = do + x <- freshTV + (p', binds) <- checkPattern p x + pure (p', x, binds) +``` + +Inferring patterns produces three things: an annotated pattern, since +syntax trees after type checking carry their types; the type of values +that pattern matches; and a list of variables the pattern binds. +Checking omits returning the type, and yields only the annotated syntax +tree and the list of bindings. + +As a special case, inferring patterns with type signatures overrides the +checking behaviour. The stated type is kind-checked (to verify its +integrity and to produce an annotated tree), then verified to be a +subtype of the inferred type for that pattern. 
+ +```haskell +inferPattern pat@(PType p t ann) = do + (p', pt, vs) <- inferPattern p + (t', _) <- resolveKind t + _ <- subsumes pat t' pt -- t' ≤ pt + case p' of + Capture v _ -> pure (PType p' t' (ann, t'), t', [(v, t')]) + _ -> pure (PType p' t' (ann, t'), t', vs) +``` + +Checking patterns is where the fun actually happens. Checking `Wildcard`s +and `Capture`s is pretty much identical, except the latter actually +expands the capture list. + +```haskell +checkPattern (Wildcard ann) ty = pure (Wildcard (ann, ty), []) +checkPattern (Capture v ann) ty = + pure (Capture (TvName v) (ann, ty), [(TvName v, ty)]) +``` + +Checking a `Destructure` looks up the type of the constructor in the +environment, possibly instancing it, and does one of two things, +depending on whether or not the destructuring did not have an inner +pattern. + +```haskell +checkPattern ex@(Destructure con ps ann) ty = + case ps of +``` + +- If there was no inner pattern, then the looked-up type is unified with +the "goal" type - the one being checked against. + +```haskell + Nothing -> do + pty <- lookupTy con + _ <- unify ex pty ty + pure (Destructure (TvName con) Nothing (ann, pty), []) +``` + +- If there _was_ an inner pattern, we proceed by decomposing the type +looked up from the environment. The inner pattern is checked against the +_domain_ of the constructor's type, while the "goal" gets unified with +the _co-domain_. + +```haskell + Just p -> do + (c, d) <- decompose ex _TyArr =<< lookupTy con + (ps', b) <- checkPattern p c + _ <- unify ex ty d +``` + +Checking tuple patterns is a bit of a mess. This is because of a +mismatch between how they're written and how they're typed: a 3-tuple +pattern (and expression!) is written like `(a, b, c)`, but it's _typed_ +like `a * (b * c)`. There is a local helper that incrementally converts +between the representations by repeatedly decomposing the goal type. 
 
+
+```haskell
+checkPattern pt@(PTuple elems ann) ty =
+  let go [x] t = (:[]) <$> checkPattern x t
+      go (x:xs) t = do
+        (left, right) <- decompose pt _TyTuple t
+        (:) <$> checkPattern x left <*> go xs right
+      go [] _ = error "malformed tuple in checkPattern"
+```
+
+Even more fun is the `PTuple` constructor is woefully overloaded: One
+with an empty list of children represents matching against `unit`{.ml}.
+One with a single child is equivalent to the contained pattern; Only one
+with more than two contained patterns makes a proper tuple.
+
+```haskell
+  in case elems of
+    [] -> do
+      _ <- unify pt ty tyUnit
+      pure (PTuple [] (ann, tyUnit), [])
+    [x] -> checkPattern x ty
+    xs -> do
+      (ps, concat -> binds) <- unzip <$> go xs ty
+      pure (PTuple ps (ann, ty), binds)
+```
+
+### Inferring and Checking Expressions
+
+Expressions are incredibly awful and the bane of my existence. There are
+18 distinct cases of expression to consider, a number which only seems
+to be going up with modules and the like in the pipeline; this
+translates to 24 distinct cases in the type checker to account for all
+of the possibilities.
+
+As with patterns, expression checking is bidirectional; and, again,
+there are a lot more checking cases than there are inference cases. So,
+let's start with the latter.
+
+#### Inferring Expressions
+
+Inferring variable references makes use of instantiation to generate
+fresh type variables for each top-level universal quantifier in the
+type. These fresh variables will then be either bound to something by
+the solver or universally quantified over in case they escape.
+
+Since Amulet is desugared into a core language resembling predicative
+System F, variable uses also lead to the generation of corresponding
+type applications - one for each eliminated quantified variable. 
+ +```haskell +infer expr@(VarRef k a) = do + (inst, old, new) <- lookupTy' k + if Map.null inst + then pure (VarRef (TvName k) (a, new), new) + else mkTyApps expr inst old new +``` + +Functions, strangely enough, have both checking _and_ inference +judgements: which is used impacts what constraints will be generated, +and that may end up making type inference more efficient (by allocating +less, or correspondingly spending less time in the solver). + +The pattern inference judgement is used to compute the type and bindings +of the function's formal parameter, and the body is inferred in the +context extended with those bindings; Then, a function type is +assembled. + +```haskell +infer (Fun p e an) = do + (p', dom, ms) <- inferPattern p + (e', cod) <- extendMany ms $ infer e + pure (Fun p' e' (an, TyArr dom cod), TyArr dom cod) +``` + +Literals are pretty self-explanatory: Figuring their types boils down to +pattern matching. + +```haskell +infer (Literal l an) = pure (Literal l (an, ty), ty) where + ty = case l of + LiInt{} -> tyInt + LiStr{} -> tyString + LiBool{} -> tyBool + LiUnit{} -> tyUnit +``` + +The inference judgement for _expressions_ with type signatures is very similar +to the one for patterns with type signatures: The type is kind-checked, +then compared against the inferred type for that expression. Since +expression syntax trees also need to be annotated, they are `correct`ed +here. + +```haskell +infer expr@(Ascription e ty an) = do + (ty', _) <- resolveKind ty + (e', et) <- infer e + _ <- subsumes expr ty' et + pure (Ascription (correct ty' e') ty' (an, ty'), ty') +``` + +There is also a judgement for turning checking into inference, again by +making a fresh type variable. + +```haskell +infer ex = do + x <- freshTV + ex' <- check ex x + pure (ex', x) +``` + +#### Checking Expressions + +Our rule for eliminating ∀s was adapted from the paper [Complete +and Easy Bidirectional Typechecking for Higher-Rank Polymorphism]. 
+Unlike in that paper, however, we do not have explicit _existential +variables_ in contexts, and so must check expressions against +deeply-skolemised types to eliminate the universal quantifiers. + +[Complete and Easy Bidirectional Typechecking for Higher-Rank +Polymorphism]: https://www.cl.cam.ac.uk/~nk480/bidir.pdf + +```haskell +check e ty@TyForall{} = do + e' <- check e =<< skolemise ty + pure (correct ty e') +``` + +If the expression is checked against a deeply skolemised version of the +type, however, it will be tagged with that, while it needs to be tagged +with the universally-quantified type. So, it is `correct`ed. + +Amulet has rudimentary support for _typed holes_, as in dependently +typed languages and, more recently, GHC. Since printing the type of +holes during type checking would be entirely uninformative due to +half-solved types, reporting them is deferred to after checking. + +Of course, holes must still have checking behaviour: They take whatever +type they're checked against. + +```haskell +check (Hole v a) t = pure (Hole (TvName v) (a, t)) +``` + +Checking functions is as easy as inferring them: The goal type is split +between domain and codomain; the pattern is checked against the domain, +while the body is checked against the codomain, with the pattern's +bindings in scope. + +```haskell +check ex@(Fun p b a) ty = do + (dom, cod) <- decompose ex _TyArr ty + (p', ms) <- checkPattern p dom + Fun p' <$> extendMany ms (check b cod) <*> pure (a, ty) +``` + +Empty `begin end` blocks are an error. + +``` +check ex@(Begin [] _) _ = throwError (EmptyBegin ex) +``` + +`begin ... end` blocks with at least one expression are checked by +inferring the types of every expression but the last, and then checking +the last expression in the block against the goal type. + +```haskell +check (Begin xs a) t = do + let start = init xs + end = last xs + start' <- traverse (fmap fst . 
infer) start
+  end' <- check end t
+  pure (Begin (start' ++ [end']) (a, t))
+```
+
+`let`s are pain. Since all our `let`s are recursive by nature, they must
+be checked, including all the bound variables, in a context where the
+types of every variable bound there are already available; To figure
+this out, however, we first need to infer the type of every variable
+bound there.
+
+If that strikes you as "painfully recursive", you're right. This is
+where the unification-based nature of our type system saved our butts:
+Each bound variable in the `let` gets a fresh type variable, the context
+is extended and the body checked against the goal.
+
+The function responsible for inferring and solving the types of
+variables is `inferLetTy`. It keeps an accumulating association list to
+check the types of further bindings as they are figured out, one by one,
+then uses the continuation to generalise (or not) the type.
+
+```haskell
+check (Let ns b an) t = do
+  ks <- for ns $ \(a, _, _) -> do
+    tv <- freshTV
+    pure (TvName a, tv)
+  extendMany ks $ do
+    (ns', ts) <- inferLetTy id ks (reverse ns)
+    extendMany ts $ do
+      b' <- check b t
+      pure (Let ns' b' (an, t))
+```
+
+We have decided to take [the advice of Vytiniotis, Peyton Jones, and
+Schrijvers], and refrain from generalising lets, except at top-level.
+This is why `inferLetTy` gets given `id` when checking terms.
+
+[the advice of Vytiniotis, Peyton Jones, and Schrijvers]: https://www.microsoft.com/en-us/research/wp-content/uploads/2016/02/tldi10-vytiniotis.pdf
+
+The judgement for checking `if` expressions is what made me stick to
+bidirectional type checking instead of fixing our variant of Algorithm
+W. The condition is checked against the boolean type, while both
+branches are checked against the goal. 
 
+
+```haskell
+check (If c t e an) ty = If <$> check c tyBool
+                            <*> check t ty
+                            <*> check e ty
+                            <*> pure (an, ty)
+```
+
+
+Since it is not possible, in general, to recover the type of a function
+at an application site, we infer it; The argument given is checked
+against that function's domain and the codomain is unified with the
+goal type.
+
+```haskell
+check ex@(App f x a) ty = do
+  (f', (d, c)) <- secondA (decompose ex _TyArr) =<< infer f
+  App f' <$> check x d <*> fmap (a,) (unify ex ty c)
+```
+
+To check `match`, the type of what's being matched against is first
+inferred because, unlike application where _some_ recovery is possible,
+we can not recover the type of matchees from the type of branches _at
+all_.
+
+```haskell
+check (Match t ps a) ty = do
+  (t', tt) <- infer t
+```
+
+Once we have the type of the matchee in hands, patterns can be checked
+against that. The branches are then each checked against the goal type.
+
+```haskell
+  ps' <- for ps $ \(p, e) -> do
+    (p', ms) <- checkPattern p tt
+    (,) <$> pure p' <*> extendMany ms (check e ty)
+```
+
+Checking binary operators is like checking function application twice.
+Very boring.
+
+```haskell
+check ex@(BinOp l o r a) ty = do
+  (o', to) <- infer o
+  (el, to') <- decompose ex _TyArr to
+  (er, d) <- decompose ex _TyArr to'
+  BinOp <$> check l el <*> pure o'
+        <*> check r er <*> fmap (a,) (unify ex d ty)
+```
+
+Checking records and record extension is a hack, so I'm not going to
+talk about them until I've cleaned them up reasonably in the codebase.
+Record access, however, is very clean: we make up a type for the
+row-polymorphic bit, and check against a record type built from the goal
+and the key.
+
+```haskell
+check (Access rc key a) ty = do
+  rho <- freshTV
+  Access <$> check rc (TyRows rho [(key, ty)])
+         <*> pure key <*> pure (a, ty)
+```
+
+Checking tuple expressions involves a local helper much like checking
+tuple patterns. 
The goal type is recursively decomposed and made to line +with the expression being checked. + +```haskell +check ex@(Tuple es an) ty = Tuple <$> go es ty <*> pure (an, ty) where + go [] _ = error "not a tuple" + go [x] t = (:[]) <$> check x t + go (x:xs) t = do + (left, right) <- decompose ex _TyTuple t + (:) <$> check x left <*> go xs right +``` + +And, to finish, we have a judgement for turning inference into checking. + +```haskell +check e ty = do + (e', t) <- infer e + _ <- subsumes e ty t + pure e' +``` + +### Conclusion + +I like the new type checker: it has many things you'd expect from a +typed lambda calculus, such as η-contraction preserving typability, and +substitution of `let`{.ocaml}-bound variables being generally +admissable. + +Our type system is fairly complex, what with rank-N types and higher +kinded polymorphism, so inferring programs under it is a bit of a +challenge. However, I am fairly sure the only place that demands type +annotations are higher-ranked _parameters_: uses of higher-rank +functions are checked without the need for annotations. + +Check out [Amulet] the next time you're looking for a typed functional +programming language that still can't compile to actual executables. + +[Amulet]: https://github.com/zardyh/amulet diff --git a/pages/posts/2018-03-14-amulet-safety.md b/pages/posts/2018-03-14-amulet-safety.md new file mode 100644 index 0000000..f06cab1 --- /dev/null +++ b/pages/posts/2018-03-14-amulet-safety.md @@ -0,0 +1,286 @@ +--- +title: Amulet and Language Safety +date: March 14, 2018 +--- + +Ever since its inception, Amulet has strived to be a language that +_guarantees_ safety, to some extent, with its strong, static, inferred +type system. Through polymorphism we gain the concept of +_parametricity_, as explained in Philip Wadler's [Theorems for Free]: a +function's behaviour does not depend on the instantiations you perform. 
+ +However, the power-to-weight ratio of these features quickly plummets, +as every complicated type system extension makes inference rather +undecidable, which in turn mandates more and more type annotations. Of +the complex extensions I have read about, three struck me as +particularly elegant, and I have chosen to implement them all in Amulet: + +- Generalised Algebraic Data Types, which this post is about; +- Row Polymorphism, which allows being precise about which structure + fields a function uses; and +- Rank-N types, which enables the implementation of many concepts + including monadic regions. + +Both GADTs and rank-N types are in the "high weight" category: inference +in the presence of both is undecidable. Adding support for the latter +(which laid the foundations for the former) is what drove me to re-write +the type checker, a crusade detailed in [my last post]. + +Of course, in the grand scheme of things, some languages provide way +more guarantees than Amulet: For instance, Rust, with its lifetime +system, can prove that code is memory-safe at compile time; +Dependently-typed languages such as Agda and Idris can express a lot of +invariants in their type system, but inference is completely destroyed. +Picking which features you'd like to support is a game of +tradeoffs---all of them have benefits, but some have exceedingly high +costs. + +Amulet was originally based on a very traditional, HM-like type system +with support for row polymorphism. The addition of rank-N polymorphism +and GADTs instigated the move to a bidirectional system, which in turn +provided us with the ability to experiment with a lot more type system +extensions (for instance, linear types)---in pursuit of more guarantees +like parametricity. 
+ +GADTs +===== + +In a sense, generalised ADTs are a "miniature" version of the inductive +families one would find in dependently-typed programming (and, indeed, +Amulet can type-check _some_ uses of length-indexed vectors, although +the lack of type-level computation is a showstopper). They allow +non-uniformity in the return types of constructors, by packaging +"coercions" along with the values; pattern matching, then, allows these +coercions to influence the solving of particular branches. + +Since this is an introduction to indexed types, I am legally obligated +to present the following three examples: the type of equality witnesses +between two other types; higher-order abstract syntax, the type of +well-formed terms in some language; and _vectors_, the type of linked +lists with statically-known lengths. + +#### Equality + +As is tradition in intuitionistic type theory, we define equality by +postulating (that is, introducing a _constructor_ witnessing) +reflexivity: anything is equal to itself. Symmetry and transitivity can +be defined as ordinary pattern-matching functions. However, this +demonstrates the first (and main) shortcoming of our implementation: +Functions which perform pattern matching on generalised constructors +_must_ have explicitly stated types.[^1] + +```ocaml +type eq 'a 'b = + | Refl : eq 'a 'a ;; + +let sym (Refl : eq 'a 'b) : eq 'b 'a = Refl ;; +let trans (Refl : eq 'a 'b) (Refl : eq 'b 'c) : eq 'a 'c = Refl ;; +``` + +Equality, when implemented like this, is conventionally used to +implement substitution: If there exists a proof that `a` and `b` are +equal, any `a` may be treated as a `b`. + +```ocaml +let subst (Refl : eq 'a 'b) (x : 'a) : 'b = x ;; +``` + +Despite `a` and `b` being distinct, _rigid_ type variables, matching on +`Refl` allows the constraint solver to treat them as equal. 
+ +#### Vectors + +```ocaml +type z ;; (* the natural zero *) +type s 'k ;; (* the successor of a number *) +type vect 'n 'a = (* vectors of length n *) + | Nil : vect z 'a + | Cons : 'a * vect 'k 'a -> vect (s 'k) 'a +``` + +Parametricity can tell us many useful things about functions. For +instance, all closed, non-looping inhabitants of the type `forall 'a. 'a +-> 'a` are operationally the identity function. However, expanding the +type grammar tends to _weaken_ parametricity before making it stronger. +Consider the type `forall 'a. list 'a -> list 'a`{.ocaml}---it has +several possible implementations: One could return the list unchanged, +return the empty list, duplicate every element in the list, drop some +elements around the middle, among _many_ other possible behaviours. + +Indexed types are beyond the point of weakening parametricity, and start +to make it strong again. Consider a function of type `forall 'a 'n. ('a +-> 'a -> ordering) -> vect 'n 'a -> vect 'n 'a`{.ocaml}---by making the +length of the vector explicit in the type, and requiring it to be kept +the same, we have ruled out any implementations that drop or duplicate +elements. A win, for sure, but at what cost? An implementation of +insertion sort for traditional lists looks like this, when implemented +in Amulet: + +```ocaml +let insert_sort cmp l = + let insert e tl = + match tl with + | Nil -> Cons (e, Nil) + | Cons (h, t) -> match cmp e h with + | Lt -> Cons (e, Cons (h, t)) + | Gt -> Cons (h, insert e t) + | Eq -> Cons (e, Cons (h, t)) + and go l = match l with + | Nil -> Nil + | Cons (x, xs) -> insert x (go xs) + in go l ;; +``` + +The implementation for vectors, on the other hand, is full of _noise_: +type signatures which we would rather not write, but are forced to by +the nature of type systems. 
+ +```ocaml +let insert_sort (cmp : 'a -> 'a -> ordering) (v : vect 'n 'a) : vect 'n 'a = + let insert (e : 'a) (tl : vect 'k 'a) : vect (s 'k) 'a = + match tl with + | Nil -> Cons (e, Nil) + | Cons (h, t) -> match cmp e h with + | Lt -> Cons (e, Cons (h, t)) + | Gt -> Cons (h, insert e t) + | Eq -> Cons (e, Cons (h, t)) + and go (v : vect 'k 'a) : vect 'k 'a = match v with + | Nil -> Nil + | Cons (x, xs) -> insert x (go xs) + in go v ;; +``` + +These are not quite theorems for free, but they are theorems for quite +cheap. + +#### Well-Typed Terms + +```ocaml +type term 'a = + | Lit : int -> term int + | Fun : ('a -> 'b) -> term ('a -> 'b) + | App : term ('a -> 'b) * term 'a -> term 'b +``` + +In much the same way as the vector example, which forced us to be +correct with our functions, GADTs can also be applied in making us be +correct with our _data_. The type `term 'a` represents well typed terms: +the interpretation of such a value need not be concerned with runtime +errors at all, by leveraging the Amulet type system to make sure its +inputs are correct. + +``` +let eval (x : term 'a) : 'a = + match x with + | Lit l -> l + | Fun f -> f + | App (f, x) -> (eval f) (eval x) +``` + + +While equalities let us bend the type system to our will, vectors and +terms let _the type system_ help us, in making incorrect implementations +compile errors. + +Rank-N Types +============ + +Rank-N types are quite useful, I'm sure. To be quite honest, they were +mostly implemented in preparation for GADTs, as the features have some +overlap. + +A use case one might imagine if Amulet had notation for monads would be +[an implementation of the ST monad][^2], which prevents mutable state +from escaping by use of rank-N types. `St.run action` is a well-typed +program, since `action` has type `forall 's. st 's int`, but `St.run +action'` is not, since that has type `forall 's. st 's (ref 's int)`. 
+ +```ocaml +let action = + St.bind (alloc_ref 123) (fun var -> + St.bind (update_ref var (fun x -> x * 2)) (fun () -> + read_ref var)) +and action' = + St.bind (alloc_ref 123) (fun var -> + St.bind (update_ref var (fun x -> x * 2)) (fun () -> + St.pure var)) +``` + +Conclusion +========== + +Types are very powerful things. A powerful type system helps guide the +programmer by allowing the compiler to infer more and more of the +_program_---type class dictionaries in Haskell, and as a more extreme +example, proof search in Agda and Idris. + +However, since the industry has long been dominated by painfully +first-order, very verbose type systems like those of Java and C#, it's +no surprise that many programmers have fled to dynamically typed +languages like ~~Go~~ Python---a type system needs to be fairly complex +before it gets to being expressive, and it needs to be _very_ complex to +get to the point of being useful. + +Complexity and difficulty, while often present together, are not +nescessarily interdependent: Take, for instance, Standard ML. The +first-order parametric types might seem restrictive when used to a +system with like Haskell's (or, to some extent, Amulet's[^3]), but they +actually allow a lot of flexibility, and do not need many annotations at +all! They are a sweet spot in the design space. + +If I knew more about statistics, I'd have some charts here correlating +programmer effort with implementor effort, and also programmer effort +with the extent of properties one can state as types. Of course, these +are all fuzzy metrics, and no amount of statistics would make those +charts accurate, so have my feelings in prose instead: + +- Implementing a dynamic type system is _literally_ no effort. No effort +needs to be spent writing an inference engine, or a constraint solver, +or a renamer, or any other of the very complex moving parts of a type +checker. 
+ + However, the freedom they allow the implementor they take away from + the programmer, by forcing them to keep track of the types of + everything mentally. Even those that swear by dynamic types can not + refute the claim that data has shape, and having a compiler that can + make sure your shapes line up so you can focus on programming is a + definite advantage. + +- On the opposite end of the spectrum, implementing a dependent type +system is a _lot_ of effort. Things quickly diverge into undecidability +before you even get to writing a solver---and higher order unification, +which has a tendency to pop up, is undecidable too. + + While the implementor is subject to an endless stream of suffering, + the programmer is in some ways free and some ways constrained. They + can now express lots of invariants in the type system, from + correctness of `sort` to correctness of [an entire compiler] or an + [operating system kernel], but they must also state very precise types + for everything. + +- In the middle lies a land of convenient programming without an +endlessly suffering compiler author, a land first explored by the ML +family with its polymorphic, inferred type system. + + This is clearly the sweet spot. Amulet leans slightly to the + dependently type end of the spectrum, but can still infer the types + for many simple and complex programs without any annotations-the + programs that do not use generalised algebraic data types or rank-N + polymorphism. + +[Theorems for Free]: https://people.mpi-sws.org/~dreyer/tor/papers/wadler.pdf +[my last post]: /posts/2018-02-18.html +[an implementation of the ST monad]: https://txt.amelia.how/st-monad.ml.html +[an entire compiler]: http://compcert.inria.fr/ +[operating system kernel]: https://sel4.systems/ + +[^1]: In reality, the details are fuzzier. To be precise, pattern +matching on GADTs only introduces an implication constraint when the +type checker is applying a checking judgement. 
In practice, this means +that at least the return type must be explicitly annotated. + +[^2]: Be warned that the example does not compile unless you remove the +modules, since our renamer is currently a bit daft. + +[^3]: This is _my_ blog, and I'm allowed to brag about my projects, damn +it. diff --git a/pages/posts/2018-03-27-amulet-gadts.md b/pages/posts/2018-03-27-amulet-gadts.md new file mode 100644 index 0000000..a7678e3 --- /dev/null +++ b/pages/posts/2018-03-27-amulet-gadts.md @@ -0,0 +1,247 @@ +--- +title: GADTs and Amulet +date: March 27, 2018 +maths: true +--- + +Dependent types are a very useful feature - the gold standard of +enforcing invariants at compile time. However, they are still very much +not practical, especially considering inference for unrestricted +dependent types is equivalent to higher-order unification, which was +proven to be undecidable. + +Fortunately, many of the benefits that dependent types bring aren't +because of dependent products themselves, but instead because of +associated features commonly present in those programming languages. One +of these, which also happens to be especially easy to mimic, are +_inductive families_, a generalisation of inductive data types: instead +of defining a single type inductively, one defines an entire _family_ of +related types. + +Many use cases for inductive families are actually instances of a rather +less general concept, that of generalised algebraic data types, or +GADTs: Contrary to the indexed data types of full dependently typed +languages, these can and are implemented in several languages with +extensive inference, such as Haskell, OCaml and, now, Amulet. 
+ +Before I can talk about their implementation, I am legally obligated to +present the example of _length indexed vectors_, linked structures whose +size is known at compile time---instead of carrying around an integer +representing the number of elements, it is represented in the type-level +by a Peano[^1] natural number, as an _index_ to the vector type. By +universally quantifying over the index, we can guarantee by +parametricity[^2] that functions operating on these don't do inappropriate +things to the sizes of vectors. + +```ocaml +type z ;; +type s 'k ;; +type vect 'n 'a = + | Nil : vect z 'a + | Cons : 'a * vect 'k 'a -> vect (s 'k) 'a +``` + +Since the argument `'n` to `vect` (its length) varies with the constructor one +chooses, we call it an _index_; On the other hand, `'a`, being uniform over all +constructors, is called a _parameter_ (because the type is _parametric_ over +the choice of `'a`). These definitions bake the measure of length into +the type of vectors: an empty vector has length 0, and adding an element +to the front of some other vector increases the length by 1. + +Matching on a vector reveals its index: in the `Nil` case, it's possible +to (locally) conclude that it had length `z`. Meanwhile, the `Cons` case +lets us infer that the length was the successor of some other natural +number, `s 'k`, and that the tail itself has length `'k`. + +If one were to write a function to `map` a function over a `vect`or, +they would be bound by the type system to write a correct implementation +- well, either that or going out of their way to make a bogus one. It +would be possible to enforce total correctness of a function such as +this one, by adding linear types and making the vector parameter linear. + +```ocaml +let map (f : 'a -> 'b) (xs : vect 'n 'a) : vect 'n 'b = + match xs with + | Nil -> Nil + | Cons (x, xs) -> Cons (f x, map f xs) ;; +``` + +If we were to, say, duplicate every element in the list, an error would +be reported. 
Unlike some others, this one is not very clear, and it +definitely could be improved. + +``` + Occurs check: The type variable jx + occurs in the type s 'jx + · Arising from use of the expression + Cons (f x, Cons (f x, map f xs)) + │ + 33 │ | Cons (x, xs) -> Cons (f x, Cons (f x, map f xs)) ;; + │ ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +``` + +This highlights the essence of GADTs: pattern matching on them reveals +equalities about types that the solver can later exploit. This is what +allows the programmer to write functions that vary their return types +based on their inputs - a very limited form of type-term dependency, +which brings us ever closer to the Calculus of Constructions corner of +Barendregt's lambda cube[^3]. + +The addition of generalised algebraic data types has been in planning +for over two years---it was in the original design document. In a +mission that not even my collaborator noticed, all of the recently-added +type system and IR features were directed towards enabling the GADT +work: bidirectional type checking, rank-N polymorphism and coercions. + +All of these features had cover stories: higher-ranked polymorphism was +motivated by monadic regions; bidirectional type checking was motivated +by the aforementioned polymorphism; and coercions were motivated by +newtype optimisation. But, in reality, it was a conspiracy to make GADTs +possible: having support for these features simplified implementing our +most recent form of fancy types, and while adding all of these in one go +would be possible, doing it incrementally was a lot saner. + +While neither higher-ranked types nor GADTs technically demand a +bidirectional type system, implementing them with such a specification +is considerably easier, removing the need for workarounds such as boxy +types and a distinction between rigid/wobbly type variables. 
Our +algorithm for GADT inference rather resembles Richard Eisenberg's +[Bake]{.textsc}[^4], in that it only uses local equalities in _checking_ +mode. + +Adding GADTs also lead directly to a rewrite of the solver, which now +has to work with _implication constraints_, of the form `(Q₁, ..., Qₙ) +=> Q`, which should be read as "Assuming `Q₁` through `Qₙ`, conclude +`Q`." Pattern matching on generalised constructors, in checking mode, +captures every constraint generated by checking the right-hand side of a +clause and captures that as an implication constraint, with all the +constructor-bound equalities as assumptions. As an example, this lets us +write a type-safe cast function: + +```ocaml +type eq 'a 'b = Refl : eq 'a 'a +(* an inhabitant of eq 'a 'b is a proof that 'a and 'b are equal *) + +let subst (Refl : eq 'a 'b) (x : 'a) : 'b = x ;; +``` + +Unfortunately, to keep inference decidable, many functions that depend +on generalised pattern matching need explicit type annotations, to guide +the type checker. + +When _checking_ the body of the function, namely the variable reference +`x`, the solver is working under an assumption `'a ~ 'b` (i.e., `'a` and +`'b` stand for the same type), which lets us unify the stated type of +`x`, namely `'a`, with the return type of the function, `'b`. + +If we remove the local assumption, say, by not matching on +`Refl`{.haskell}, the solver will not be allowed to unify the two type +variables `'a` and `'b`, and an error message will be reported[^6]: + +``` +examples/gadt/equality.ml[11:43 ..11:43]: error + Can not unify rigid type variable b with the rigid type variable a + · Note: the variable b was rigidified because of a type ascription + against the type forall 'a 'b. t 'a 'b -> 'a -> 'b + and is represented by the constant bq + · Note: the rigid type variable a, in turn, + was rigidified because of a type ascription + against the type forall 'a 'b. 
t 'a 'b -> 'a -> 'b + · Arising from use of the expression + x + │ + 11 │ let subst (_ : t 'a 'b) (x : 'a) : 'b = x ;; + │ ~ +``` + +Our intermediate language was also extended, from a straightforward +System F-like lambda calculus with type abstractions and applications, +to a System FC-like system with _coercions_, _casts_, and +_coercion abstraction_. Coercions are the evidence, produced by the +solver, that an expression is usable as a given type---GADT patterns +bind coercions like these, which are the "reification" of an implication +constraint. This lets us make type-checking on the intermediate language +fast and decidable[^5], as a useful sanity check. + +The two new judgements for GADT inference correspond directly to new +cases in the `infer` and `check` functions, the latter of which I +present here for completeness. The simplicity of this change serves as +concrete evidence of the claim that bidirectional systems extend readily +to new, complex features, producing maintainable and readable code. + +```haskell +check (Match t ps a) ty = do + (t, tt) <- infer t + ps <- for ps $ \(p, e) -> do + (p', ms, cs) <- checkPattern p tt + let tvs = Set.map unTvName (boundTvs p' ms) + (p',) <$> implies (Arm p e) tt cs + (local (typeVars %~ Set.union tvs) + (extendMany ms (check e ty))) + pure (Match t ps (a, ty)) +``` + +This corresponds to the checking judgement for matches, presented below. +Note that in my (rather informal) theoretical presentation of Amulet +typing judgements, we present implication constraints as a lexical scope +of equalities conjoined with the scope of variables; Inference +judgements (with double right arrows, $\Rightarrow$) correspond to uses of +`infer`, pattern checking judgements ($\Leftarrow_\text{pat}$) +correspond to `checkPattern`, which also doubles as $\mathtt{binds}$ and +$\mathtt{cons}$, and the main checking judgement $\Leftarrow$ is the +function `check`. 
+ +$$ +\frac{\Gamma; \mathscr{Q} \vdash e \Rightarrow \tau +\quad \Gamma \vdash p_i \Leftarrow_\text{pat} \tau +\quad \Gamma, \mathtt{binds}(p_i); \mathscr{Q}, \mathtt{cons}(p_i) +\vdash e_i \Leftarrow \sigma} +{\Gamma; \mathscr{Q} \vdash \mathtt{match}\ e\ \mathtt{with}\ \{p_i \to +e_i\} \Leftarrow \sigma} +$$ + +Our implementation of the type checker is a bit more complex, because it +also does (some) elaboration and bookkeeping: tagging terms with types, +blaming type errors correctly, etc. + +--- + +This new, complicated feature was a lot harder to implement than +originally expected, but in the end it worked out. GADTs let us make the +type system _stronger_, while maintaining the decidable inference that +the non-fancy subset of the language enjoys. + +The example presented here was the most boring one possible, mostly +because [two weeks ago] I wrote about their impact on the language's +ability to make things safer. + +[^1]: Peano naturals are one particular formulation of the natural +numbers, which postulates that zero (denoted `z` above) is a natural +number, and any natural number's successor (denoted `s 'k` above) is +itself natural. + +[^2]: This is one application of Philip Wadler's [Theorems for Free] +technique: given a (polymorphic) type of some function, we can derive +much of its behaviour. + +[^3]: Amulet is currently somewhere on the edge between λ2 - the second +order lambda calculus, System F, and λP2, a system that allows +quantification over types and terms using the dependent product form, +which subsumes both the ∀ binder and the → arrow. Our lack of type +functions currently leaves us very far from the CoC. + +[^4]: See [his thesis]. Our algorithm, of course, has the huge +simplification of not having to deal with full dependent types. + +[^5]: Even if we don't do it yet---work is still ongoing to make the +type checker and solver sane. + +[^6]: And quite a good one, if I do say so! 
The compiler +syntax highlights and pretty-prints both terms and types relevant to the +error, as you can see [here]. + +[Theorems for Free]: http://homepages.inf.ed.ac.uk/wadler/topics/parametricity.html +[his thesis]: https://repository.brynmawr.edu/cgi/viewcontent.cgi?article=1074&context=compsci_pubs + +[two weeks ago]: /posts/2018-03-14.html +[here]: https://i.amelia.how/68c4d.png diff --git a/pages/posts/2018-08-11-amulet-updates.md b/pages/posts/2018-08-11-amulet-updates.md new file mode 100644 index 0000000..2e41bbd --- /dev/null +++ b/pages/posts/2018-08-11-amulet-updates.md @@ -0,0 +1,304 @@ +--- +title: Amulet updates +date: August 11, 2018 +maths: true +--- + +Jesus, it's been a while. Though my last post was almost 6 months ago +(give or take a few), I've been busy working on +[Amulet](https://github.com/tmpim/amulet), which continues to grow, +almost an eldritch abomination you try desperately, but fail, to kill. + +Since my last post, Amulet has changed a ton, in noticeable and +not-so-noticeable ways. Here are the major changes to the compiler since +then. + +Parser improvements +=================== + +No language is good to use if it's inconvenient. So, in an effort to +make writing code more convenient, we've removed the need for `;;` after +top-level declarations, and added a _bunch_ of indentation sensitivity, +thus making several keywords optional: `begin`{.ocaml} and `end`{.ocaml} +are implicit in the body of a `fun`{.ocaml}, `match`{.ocaml}, or +`let`{.ocaml}, which has made those keywords almost entirely obsolete. +The body of a `let`{.ocaml} also need not be preceded by `in`{.ocaml} if +meaning is clear from indentation. 
+ +To demonstrate, where you would have + +```ocaml +let foo = + let bar = fun x -> begin + a; + b; + c + end in begin + bar (); + bar 1; + end ;; +``` + +One can now write + +```ocaml +let foo = + let bar = fun x -> + a + b + c + bar () + bar 1 +``` + +Moreover, we've added shorthand syntax for building and destructuring +records: `{ x, y, z }`{.ocaml} is equivalent to `{ x = x, y = y, z = z +}`{.ocaml} in both pattern and expression position. + + +Changes to record typing +======================== + +Whereas `{ x with a = b }` would extend the record `x` to contain a new +field `a` (with value `b`), it's now _monomorphic update_ of the record +`x`. That is: `x` must _already_ contain a field called `a`, with the +same type as `b`. + +This lets you write a function for updating a field in a record, such as +the one below, which would previously be impossible. Supporting +polymorphic update is not a priority, but it'd be nice to have. The way +PureScript, another language with row-polymorphic records, implements +polymorphic update does not fit in with our constraint based type +system. A new type of constraint would have to be introduced +specifically for this, which while not impossible, is certainly +annoying. + +```ocaml +let foo : forall 'row. { 'row | x : int } -> { 'row | x : int } = + fun r -> { r with x = r.x + 1 } +``` + +The impossibility of supporting polymorphic update with regular +subsumption constraints $a \le b$ stems from the fact that, when faced +with such a constraint, the compiler must produce a coercion function +that turns _any_ $a$ into a $b$ _given the types alone_. This is +possible for, say, field narrowing---just pick out the fields you want +out of a bigger record---but not for update, since the compiler has no +way of turning, for instance, an `int`{.ocaml} into a `string`{.ocaml}. 
+ +Stronger support for Rank-N Types +================================= + +Changes to how we handle subsumption have made it possible to store +polymorphic values in not only tuples, but also general records. For +instance: + +```ocaml +let foo = { + apply : forall 'a 'b. ('a -> 'b) -> 'a -> 'b = + fun x -> x +} (* : { apply : forall 'a 'b. ('a -> 'b) -> 'a -> 'b } *) +``` + +`foo`{.ocaml} is a record containing a single polymorphic application +function. It can be used like so: + +```ocaml +let () = + let { apply } = foo + apply (+ 1) 1 + apply (fun x -> x) () +``` + +Pattern-matching Let +==================== + +A feature I've desired for a while now, `let` expressions (and +declarations!) can have a pattern as their left-hand sides, as +demonstrated above. These can be used for any ol' type, including for +cases where pattern matching would be refutable. I haven't gotten around +to actually implementing this yet, but in the future, pattern matching +in `let`s will be restricted to (arbitrary) product types only. + +```ocaml +type option 'a = Some of 'a | None +type foo = Foo of { x : int } + +let _ = + let Some x = ... (* forbidden *) + let Foo { x } = ... (* allowed *) +``` + +Even more "in-the-future", if we ever get around to adding attributes +like OCaml's, the check for this could be bypassed by annotating the +declaration with (say) a `[@partial]`{.ocaml} attribute. + +Unfortunately, since Amulet _is_ a strict language, these are a bit +limited: They can not be recursive in _any_ way, neither directly nor +indirectly. + +```ocaml +(* type error *) +let (a, b) = (b, a) + +(* similarly *) +let (a, b) = x +and x = (a, b) +``` + +Cycle detection and warnings +============================ + +A verification pass is run over the AST if type-checking succeeds, to +forbid illegal uses of recursion (strict language) and, as an additional +convenience, warn when local variables go unused. 
+ +For instance, this is [forbidden](/static/verify_error.png): + +```ocaml +let f = (fun x -> x) g +and g = (fun x -> x) f +``` + +And this gives a [warning](/static/verify_warn.png): + +```ocaml +let _ = + let { a, b } = { a = 1, b = 2 } + () +``` + +Plans for this include termination (and/or productivity) (as a +warning) and exhaustiveness checks (as an error). + +No more `main` +============ + +Since pattern-matching `let`{.ocaml}s are allowed at top-level, there's no more +need for `main`. Instead of + +```ocaml +let main () = + ... +``` + +Just match on `()`{.ocaml} at top-level: + +```ocaml +let () = + ... +``` + +This gets rid of the (not so) subtle unsoundness introduced by the code +generator having to figure out how to invoke `main`, and the type +checker not checking that `main` has type `unit -> 'a`{.ocaml}, and also +allows us to remove much of the silly special-casing around variables +called `main`. + +```ocaml +let main x = x + 1 +(* attempt to perform arithmetic on a nil value *) +``` + +Implicit Parameters +=================== + +A bit like Scala's, these allow marking a function's parameter as +implicit and having the type checker find out what argument you meant +automatically. Their design is based on a bit of reading other compiler +code, and also the paper on modular implicits for OCaml. However, we do +not have a ML-style module system at all (much to my dismay, but it's +being worked on), much less first class modules. + +Implicit parameters allow ad-hoc overloading based on dictionary passing +(like type classes, but with less inference). + +```ocaml +type show 'a = Show of 'a -> string +let show ?(Show f) = f + +let implicit show_string = + Show (fun x -> x) + +let "foo" = show "foo" +``` + +Here, unification makes it known that `show` is looking for an implicit +argument of type `show string`{.ocaml}, and the only possibility is +`show_string`{.ocaml}, which is what gets used. 
+ +Implicit laziness +================= + +There is a built-in type `lazy : type -> type`{.ocaml}, a function +`force : forall 'a. lazy 'a -> 'a` for turning a thunk back into a +value, and a keyword `lazy` that makes a thunk out of any expression. +`lazy 'a`{.ocaml} and `'a` are mutual subtypes of eachother, and the +compiler inserts thunks/`force`{.ocaml}s where appropriate to make code +type-check. + +```ocaml +let x && y = if x then force y else false +let () = + false && launch_the_missiles () + +(* no missiles were launched in the execution of this program *) +``` + +General refactoring +=================== + +- Literal patterns are allowed for all types, and they're tested of +using `(==)`. + +- Amulet only has one type constructor in its AST for all its kinds of +functions: `forall 'a. 'a -> int`{.ocaml}, `int -> string`{.ocaml} and `show string => +unit`{.ocaml} are all represented the same internally and disambiguated +by dependency/visibility flags. + +- Polymorphic recursion is checked using "outline types", computed +before fully-fledged inference kicks in based solely on the shape of +values. This lets us handle the function below without an annotation on +its return type by computing that `{ count = 1 }` _must_ have type `{ +count : int }` beforehand. + + Combined with the annotation on `x`, this gives us a "full" type + signature, which lets us use checking for `size`, allowing polymorphic + recursion to happen. + +~~~{.ocaml} + type nested 'a = Nested of nested ('a * 'a) * nested ('a * 'a) | One of 'a + let size (x : nested 'a) = + match x with + | One _ -> { count = 1 } + | Nested (a, _) -> { count = 2 * (size a).count } +~~~ + +- The newtype elimination pass was rewritten once and, unfortunately, +disabled, since it was broken with some touchy code. + +- Operator declarations like `let 2 + 2 = 5 in 2 + 2` are admissible. 
+ +- Sanity of optimisations is checked at runtime by running a type +checker over the intermediate language programs after all optimisations + +- Local opens are allowed, with two syntaxes: Either +`M.( x + y)`{.ocaml} (or `M.{ a, b }`{.ocaml}) or `let open M in x + +y`{.ocaml}. + +- Amulet is inconsistent in some more ways, such as `type : type` +holding. + +- There are no more kinds. + +Conclusion +========== + +This post was a bit short, and also a bit hurried. Some of the features +here deserve better explanations, but I felt like giving them an +_outline_ (haha) would be better than leaving the blag to rot (yet +again). + +Watch out for a blog post regarding (at _least_) implicit parameters, +which will definitely involve the changes to subtyping involving +records/tuples. diff --git a/pages/posts/2019-01-28-mldelta.lhs b/pages/posts/2019-01-28-mldelta.lhs new file mode 100644 index 0000000..f176e39 --- /dev/null +++ b/pages/posts/2019-01-28-mldelta.lhs @@ -0,0 +1,329 @@ +--- +title: Compositional Typing for ML +date: January 28, 2019 +maths: true +--- +\long\def\ignore#1{} + +Compositional type-checking is a neat technique that I first saw in a +paper by Olaf Chitil[^1]. He introduces a system of principal _typings_, +as opposed to a system of principal _types_, as a way to address the bad +type errors that many functional programming languages with type systems +based on Hindley-Milner suffer from. + +Today I want to present a small type checker for a core ML (with, +notably, no data types or modules) based roughly on the ideas from that +paper. This post is _almost_ literate Haskell, but it's not a complete +program: it only implements the type checker. If you actually want to +play with the language, grab the unabridged code +[here](https://github.com/zardyh/mld). 
+ +\ignore{ +\begin{code} +{-# LANGUAGE GeneralizedNewtypeDeriving, DerivingStrategies #-} +\end{code} +} + +--- + +\begin{code} +module Typings where + +import qualified Data.Map.Merge.Strict as Map +import qualified Data.Map.Strict as Map +import qualified Data.Set as Set + +import Data.Foldable +import Data.List +import Data.Char + +import Control.Monad.Except +\end{code} + +We'll begin, like always, by defining data structures for the language. +Now, this is a bit against my style, but this system (which I +shall call ML$\Delta$ - but only because it sounds cool) is not +presented as a pure type system - there are separate grammars for terms +and types. Assume that `Var`{.haskell} is a suitable member of all the +appropriate type classes. + +\begin{code} +data Exp + = Lam Var Exp + | App Exp Exp + | Use Var + | Let (Var, Exp) Exp + | Num Integer + deriving (Eq, Show, Ord) + +data Type + = TyVar Var + | TyFun Type Type + | TyCon Var + deriving (Eq, Show, Ord) +\end{code} + +ML$\Delta$ is _painfully_ simple: It's a lambda calculus +extended with `Let`{.haskell} since there needs to be a demonstration of +recursion and polymorphism, and numbers so there can be a base type. It +has no unusual features - in fact, it doesn't have many features at all: +no rank-N types, GADTs, type classes, row-polymorphic records, tuples or +even algebraic data types. + +I believe that a fully-featured programming language along the lines of +Haskell could be shaped out of a type system like this, however I am not +smart enough and could not find any prior literature on the topic. +Sadly, it seems that compositional typings aren't a very active area of +research at all. + +The novelty starts to show up when we define data to represent the +different kinds of scopes that crop up. There are monomorphic +$\Delta$-contexts, which assign _types_ to names, and also polymorphic +$\Gamma$-contexts, that assign _typings_ to names instead. 
While we're +defining `newtype`{.haskell}s over `Map`{.haskell}s, let's also get +substitutions out of the way. + +\begin{code} +newtype Delta = Delta (Map.Map Var Type) + deriving (Eq, Ord, Semigroup, Monoid) + +newtype Subst = Subst (Map.Map Var Type) + deriving (Eq, Show, Ord, Monoid) + +newtype Gamma = Gamma (Map.Map Var Typing) + deriving (Eq, Show, Ord, Semigroup, Monoid) +\end{code} + +The star of the show, of course, are the typings themselves. A typing is +a pair of a (monomorphic) type $\tau$ and a $\Delta$-context, and in a +way it packages both the type of an expression and the variables it'll +use from the scope. + +\begin{code} +data Typing = Typing Delta Type + deriving (Eq, Show, Ord) +\end{code} + +With this, we're ready to look at how inference proceeds for +ML$\Delta$. I make no effort at relating the rules +implemented in code to anything except a vague idea of the rules in the +paper: Those are complicated, especially since they deal with a language +much more complicated than this humble calculus. In an effort not to +embarrass myself, I'll also not present anything "formal". + +--- + +\begin{code} +infer :: Exp -- The expression we're computing a typing for + -> Gamma -- The Γ context + -> [Var] -- A supply of fresh variables + -> Subst -- The ambient substitution + -> Either TypeError ( Typing -- The typing + , [Var] -- New variables + , Subst -- New substitution + ) +\end{code} + +There are two cases when dealing with variables. Either a typing is +present in the environment $\Gamma$, in which case we just use that +with some retouching to make sure type variables aren't repeated - this +takes the place of instantiating type schemes in Hindley-Milner. +However, a variable can also _not_ be in the environment $\Gamma$, in +which case we invent a fresh type variable $\alpha$[^2] for it and insist on +the monomorphic typing $\{ v :: \alpha \} \vdash \alpha$. 
+ +\begin{code} +infer (Use v) (Gamma env) (new:xs) sub = + case Map.lookup v env of + Just ty -> -- Use the typing that was looked up + pure ((\(a, b) -> (a, b, sub)) (refresh ty xs)) + Nothing -> -- Make a new one! + let new_delta = Delta (Map.singleton v new_ty) + new_ty = TyVar new + in pure (Typing new_delta new_ty, xs, sub) +\end{code} + +Interestingly, this allows for (principal!) typings to be given even to +code containing free variables. The typing for the expression `x`, for +instance, is reported to be $\{ x :: \alpha \} \vdash \alpha$. Since +this isn't meant to be a compiler, there's no handling for variables +being out of scope, so the full inferred typings are printed on the +REPL- err, RETL? A read-eval-type-loop! + +``` +> x +{ x :: a } ⊢ a +``` + +Moreover, this system does not have type schemes: Typings subsume those +as well. Typings explicitly carry information regarding which type +variables are polymorphic and which are constrained by something in the +environment, avoiding a HM-like generalisation step. + +\begin{code} + where + refresh :: Typing -> [Var] -> (Typing, [Var]) + refresh (Typing (Delta delta) tau) xs = + let tau_fv = Set.toList (ftv tau `Set.difference` foldMap ftv delta) + (used, xs') = splitAt (length tau_fv) xs + sub = Subst (Map.fromList (zip tau_fv (map TyVar used))) + in (Typing (applyDelta sub delta) (apply sub tau), xs') +\end{code} + +`refresh`{.haskell} is responsible for ML$\Delta$'s analogue of +instantiation: New, fresh type variables are invented for each type +variable free in the type $\tau$ that is not also free in the context +$\Delta$. Whether or not this is better than $\forall$ quantifiers is up +for debate, but it is jolly neat. + +The case for application might be the most interesting. We infer two +typings $\Delta \vdash \tau$ and $\Delta' \vdash \sigma$ for the +function and the argument respectively, then unify $\tau$ with $\sigma +\to \alpha$ with $\alpha$ fresh. 
+ + +\begin{code} +infer (App f a) env (alpha:xs) sub = do + (Typing delta_f type_f, xs, sub) <- infer f env xs sub + (Typing delta_a type_a, xs, sub) <- infer a env xs sub + + mgu <- unify (TyFun type_a (TyVar alpha)) type_f +\end{code} + +This is enough to make sure that the expressions involved are +compatible, but it does not ensure that the _contexts_ attached are also +compatible. So, the substitution is applied to both contexts and they +are merged - variables present in one but not in the other are kept, and +variables present in both have their types unified. + +\begin{code} + let delta_f' = applyDelta mgu delta_f + delta_a' = applyDelta mgu delta_a + delta_fa <- mergeDelta delta_f' delta_a' + + pure (Typing delta_fa (apply mgu (TyVar alpha)), xs, sub <> mgu) +\end{code} + +If a variable `x` has, say, type `Bool` in the function's context but `Int` +in the argument's context - that's a type error, one which that can be +very precisely reported as an inconsistency in the types `x` is used at +when trying to type some function application. This is _much_ better than +the HM approach, which would just claim the latter usage is wrong. +There are three spans of interest, not one. + +Inference for $\lambda$ abstractions is simple: We invent a fresh +monomorphic typing for the bound variable, add it to the context when +inferring a type for the body, then remove that one specifically from +the typing of the body when creating one for the overall abstraction. 
+ +\begin{code} +infer (Lam v b) (Gamma env) (alpha:xs) sub = do + let ty = TyVar alpha + mono_typing = Typing (Delta (Map.singleton v ty)) ty + new_env = Gamma (Map.insert v mono_typing env) + + (Typing (Delta body_delta) body_ty, xs, sub) <- infer b new_env xs sub + + let delta' = Delta (Map.delete v body_delta) + pure (Typing delta' (apply sub (TyFun ty body_ty)), xs, sub) +\end{code} + +Care is taken to apply the ambient substitution to the type of the +abstraction so that details learned about the bound variable inside the +body will be reflected in the type. This could also be extracted from +the typing of the body, I suppose, but _eh_. + +`let`{.haskell}s are very easy, especially since generalisation is +implicit in the structure of typings. We simply compute a typing from +the body, _reduce_ it with respect to the let-bound variable, add it to +the environment and infer a typing for the body. + +\begin{code} +infer (Let (var, exp) body) gamma@(Gamma env) xs sub = do + (exp_t, xs, sub) <- infer exp gamma xs sub + let exp_s = reduceTyping var exp_t + gamma' = Gamma (Map.insert var exp_s env) + infer body gamma' xs sub +\end{code} + +Reduction w.r.t. a variable `x` is a very simple operation that makes +typings as polymorphic as possible, by deleting entries whose free type +variables are disjoint with the overall type along with the entry for +`x`. + +\begin{code} +reduceTyping :: Var -> Typing -> Typing +reduceTyping x (Typing (Delta delta) tau) = + let tau_fv = ftv tau + delta' = Map.filter keep (Map.delete x delta) + keep sigma = not $ Set.null (ftv sigma `Set.intersection` tau_fv) + in Typing (Delta delta') tau +\end{code} + +--- + +Parsing, error reporting and user interaction do not have interesting +implementations, so I have chosen not to include them here. 
+ +Compositional typing is a very promising approach for languages with +simple polymorphic type systems, in my opinion, because it presents a +very cheap way of providing very accurate error messages much better +than those of Haskell, OCaml and even Elm, a language for which good +error messages are an explicit goal. + +As an example of this, consider the expression `fun x -> if x (add x 0) +1`{.ocaml} (or, in Haskell, `\x -> if x then (x + (0 :: Int)) else (1 :: +Int)`{.haskell} - the type annotations are to emulate +ML$\Delta$'s insistence on monomorphic numbers). + + Types Bool and Int aren't compatible + When checking that all uses of 'x' agree + + When that checking 'if x' (of type e -> e -> e) + can be applied to 'add x 0' (of type Int) + + Typing conflicts: + · x : Bool vs. Int + +The error message generated here is much better than the one GHC +reports, if you ask me. It points out not that x has some "actual" type +distinct from its "expected" type, as HM would conclude from its +left-to-right bias, but rather that two uses of `x` aren't compatible. + + :4:18: error: + • Couldn't match expected type ‘Int’ with actual type ‘Bool’ + • In the expression: (x + 0 :: Int) + In the expression: if x then (x + 0 :: Int) else 0 + In the expression: \ x -> if x then (x + 0 :: Int) else 0 + +Of course, the prototype doesn't care for positions, so the error +message is still not as good as it could be. + +Perhaps it should be further investigated whether this approach scales +to at least type classes (since a form of ad-hoc polymorphism is +absolutely needed) and polymorphic records, so that it can be used in a +real language. I have my doubts as to if a system like this could +reasonably be extended to support rank-N types, since it does not have +$\forall$ quantifiers. + +**UPDATE**: I found out that extending a compositional typing system to +support type classes is not only possible, it was also [Gergő Érdi's MSc. 
+thesis](https://gergo.erdi.hu/projects/tandoori/)! + +**UPDATE**: Again! This is new. Anyway, I've cleaned up the code and +[thrown it up on GitHub](https://github.com/zardyh/mld). + +Again, a full program implementing ML$\Delta$ is available +[here](https://github.com/zardyh/mld). +Thank you for reading! + + +[^1]: Olaf Chitil. 2001. Compositional explanation of types and +algorithmic debugging of type errors. In Proceedings of the sixth ACM +SIGPLAN international conference on Functional programming (ICFP '01). +ACM, New York, NY, USA, 193-204. +[DOI](http://dx.doi.org/10.1145/507635.507659). + +[^2]: Since I couldn't be arsed to set up monad transformers and all, + we're doing this the lazy way (ba dum tss): an infinite list of + variables, and hand-rolled reader/state monads. diff --git a/pages/posts/2019-09-22-amulet-records.md b/pages/posts/2019-09-22-amulet-records.md new file mode 100644 index 0000000..9cdeafc --- /dev/null +++ b/pages/posts/2019-09-22-amulet-records.md @@ -0,0 +1,191 @@ +--- +title: "A Quickie: Manipulating Records in Amulet" +date: September 22, 2019 +maths: true +--- + +Amulet, unlike some [other languages], has records figured out. Much like +in ML (and PureScript), they are their own, first-class entities in the +language as opposed to being syntax sugar for defining a product +constructor and projection functions. + +### Records are good + +Being entities in the language, it's logical to characterize them by +their introduction and elimination judgements[^1]. 
+ +Records are introduced with record literals: + +$$ +\frac{ + \Gamma \vdash \overline{e \downarrow \tau} +}{ + \Gamma \vdash \{ \overline{\mathtt{x} = e} \} \downarrow \{ \overline{\mathtt{x} : \tau} \} +} +$$ + +And eliminated by projecting a single field: + +$$ +\frac{ + \Gamma \vdash r \downarrow \{ \alpha | \mathtt{x} : \tau \} +}{ + \Gamma \vdash r.\mathtt{x} \uparrow \tau +} +$$ + +Records also support monomorphic update: + +$$ +\frac{ + \Gamma \vdash r \downarrow \{ \alpha | \mathtt{x} : \tau \} +\quad \Gamma \vdash e \downarrow \tau +}{ + \Gamma \vdash \{ r\ \mathtt{with\ x} = e \} \downarrow \{ \alpha | \mathtt{x} : \tau \} +} +$$ + +### Records are.. kinda bad? + +Unfortunately, the rather minimalistic vocabulary for talking about +records makes them slightly worthless. There's no way to extend a +record, or to remove a key; Changing the type of a key is also +forbidden, with the only workaround being enumerating all of the keys +you _don't_ want to change. + +And, rather amusingly, given the trash-talking I pulled in the first +paragraph, updating nested records is still a nightmare. + +```amulet +> let my_record = { x = 1, y = { z = 3 } } +my_record : { x : int, y : { z : int } } +> { my_record with y = { my_record.y with z = 4 } } +_ = { x = 1, y = { z = 4 } } +``` + +Yikes. Can we do better? + +### An aside: Functional Dependencies + +Amulet recently learned how to cope with [functional dependencies]. +Functional dependencies extend multi-param type classes by allowing the +programmer to restrict the relationships between parameters. To +summarize it rather terribly: + +```amulet +(* an arbitrary relationship between types *) +class r 'a 'b +(* a function between types *) +class f 'a 'b | 'a -> 'b +(* a one-to-one mapping *) +class o 'a 'b | 'a -> 'b, 'b -> 'a +``` + +### Never mind, records are good + +As of [today], Amulet knows the magic `row_cons` type class, inspired by +[PureScript's class of the same name]. 
+ +```amulet +class + row_cons 'record ('key : string) 'type 'new + | 'record 'key 'type -> 'new (* 1 *) + , 'new 'key -> 'record 'type (* 2 *) +begin + val extend_row : forall 'key -> 'type -> 'record -> 'new + val restrict_row : forall 'key -> 'new -> 'type * 'record +end +``` + +This class has built-in solving rules corresponding to the two +functional dependencies: + +1. If the original `record`, the `key` to be inserted, and its + `type` are all known, then the `new` record can be solved for; +2. If both the `key` that was inserted, and the `new` record, it is + possible to solve for the old `record` and the `type` of the `key`. + +Note that rule 2 almost lets `row_cons` be solved for in reverse. Indeed, this is expressed by the type of `restrict_row`, which discovers both the `type` and the original `record`. + +Using the `row_cons` class and its magical methods... + +1. Records can be extended: +```amulet +> Amc.extend_row @"foo" true { x = 1 } +_ : { foo : bool, x : int } = + { foo = true, x = 1 } +``` +2. Records can be restricted: +```amulet +> Amc.restrict_row @"x" { x = 1 } +_ : int * { } = (1, { x = 1 }) +``` + +And, given [a suitable framework of optics], records can be updated +nicely: + +```amulet +> { x = { y = 2 } } |> (r @"x" <<< r @"y") ^~ succ +_ : { x : { y : int } } = + { x = { y = 3 } } +``` + +### God, those are some ugly types + +It's worth pointing out that making an optic that works for all fields, +parametrised by a type-level string, is not easy or pretty, but it is +work that only needs to be done once. 
+ +```ocaml +type optic 'p 'a 's <- 'p 'a 'a -> 'p 's 's + +class + Amc.row_cons 'r 'k 't 'n + => has_lens 'r 'k 't 'n + | 'k 'n -> 'r 't +begin + val rlens : strong 'p => proxy 'k -> optic 'p 't 'n +end + +instance + Amc.known_string 'key + * Amc.row_cons 'record 'key 'type 'new + => has_lens 'record 'key 'type 'new +begin + let rlens _ = + let view r = + let (x, _) = Amc.restrict_row @'key r + x + let set x r = + let (_, r') = Amc.restrict_row @'key r + Amc.extend_row @'key x r' + lens view set +end + +let r + : forall 'key -> forall 'record 'type 'new 'p. + Amc.known_string 'key + * has_lens 'record 'key 'type 'new + * strong 'p + => optic 'p 'type 'new = + fun x -> rlens @'record (Proxy : proxy 'key) x +``` + +--- + +Sorry for the short post, but that's it for today. + +--- + +[^1]: Record fields $\mathtt{x}$ are typeset in monospaced font to make +it apparent that they are unfortunately not first-class in the language, +but rather part of the syntax. Since Amulet's type system is inherently +bidirectional, the judgement $\Gamma \vdash e \uparrow \tau$ represents +type inference while $\Gamma \vdash e \downarrow \tau$ stands for type +checking. + +[functional dependencies]: https://web.cecs.pdx.edu/~mpj/pubs/fundeps.html +[other languages]: https://haskell.org +[today]: https://github.com/tmpim/amulet/pull/168 +[PureScript's class of the same name]: https://pursuit.purescript.org/builtins/docs/Prim.Row#t:Cons +[a suitable framework of optics]: /static/profunctors.ml.html diff --git a/pages/posts/2019-09-25-amc-prove.md b/pages/posts/2019-09-25-amc-prove.md new file mode 100644 index 0000000..5926727 --- /dev/null +++ b/pages/posts/2019-09-25-amc-prove.md @@ -0,0 +1,136 @@ +--- +title: "Announcement: amc-prove" +date: September 25, 2019 +maths: true +--- + +`amc-prove` is a smallish tool to automatically prove (some) sentences +of constructive quantifier-free[^1] first-order logic using the Amulet +compiler's capability to suggest replacements for typed holes. 
+ +In addition to printing whether or not it could determine the truthiness +of the sentence, `amc-prove` will also report the smallest proof term it +could compute of that type. + +### What works right now + +* Function types `P -> Q`{.amcprove}, corresponding to $P \to Q$ in the logic. +* Product types `P * Q`{.amcprove}, corresponding to $P \land Q$ in the logic. +* Sum types `P + Q`{.amcprove}, corresponding to $P \lor Q$ in the logic +* `tt`{.amcprove} and `ff`{.amcprove} correspond to $\top$ and $\bot$ respectively +* The propositional bi-implication type `P <-> Q`{.amcprove} stands for $P \iff Q$ + and is interpreted as $P \to Q \land Q \to P$ + +### What is fiddly right now + +Amulet will not attempt to pattern match on a sum type nested inside a +product type. Concretely, this means having to replace $(P \lor Q) \land +R \to S$ by $(P \lor Q) \to R \to S$ (currying). + +`amc-prove`'s support for negation and quantifiers is incredibly fiddly. +There is a canonical empty type, `ff`{.amcprove}, but the negation +connective `not P`{.amcprove} expands to `P -> forall 'a. +'a`{.amcprove}, since empty types aren't properly supported. As a +concrete example, take the double-negation of the law of excluded middle +$\neg\neg(P \lor \neg{}P)$, which holds constructively. + +If you enter the direct translation of that sentence as a type, +`amc-prove` will report that it couldn't find a solution. However, by +using `P -> ff`{.amc-prove} instead of `not P`{.amc-prove}, a solution is +found. + +```amc-prove +? not (not (P + not P)) +probably not. +? ((P + (P -> forall 'a. 'a)) -> forall 'a. 'a) -> forall 'a. 'a +probably not. +? ((P + (P -> ff)) -> ff) -> ff +yes. + fun f -> f (R (fun b -> f (L b))) +``` + +### How to get it + +`amc-prove` is bundled with the rest of the Amulet compiler [on Github]. +You'll need [Stack] to build. 
I recommend building with `stack build +--fast` since the compiler is rather large and `amc-prove` does not +benefit much from GHC's optimisations. + +``` +% git clone https://github.com/tmpim/amc-prove.git +% cd amc-prove +% stack build --fast +% stack run amc-prove +Welcome to amc-prove. +? +``` + + +### Usage sample + +Here's a small demonstration of everything that works. + +```amc-prove +? P -> P +yes. + fun b -> b +? P -> Q -> P +yes. + fun a b -> a +? Q -> P -> P +yes. + fun a b -> b +? (P -> Q) * P -> Q +yes. + fun (h, x) -> h x +? P * Q -> P +yes. + fun (z, a) -> z +? P * Q -> Q +yes. + fun (z, a) -> a +? P -> Q -> P * Q +yes. + fun b c -> (b, c) +? P -> P + Q +yes. + fun y -> L y +? Q -> P + Q +yes. + fun y -> R y +? (P -> R) -> (Q -> R) -> P + Q -> R +yes. + fun g f -> function + | (L y) -> g y + | (R c) -> f c +? not (P * not P) +yes. + Not (fun (a, (Not h)) -> h a) +(* Note: Only one implication of DeMorgan's second law holds +constructively *) +? not (P + Q) <-> (not P) * (not Q) +yes. +(* Note: I have a marvellous term to prove this proposition, + but unfortunately it is too large to fit in this margin. *) +? (not P) + (not Q) -> not (P * Q) +yes. + function + | (L (Not f)) -> + Not (fun (a, b) -> f a) + | (R (Not g)) -> + Not (fun (y, z) -> g z) +``` + +[^1]: Technically, amc-prove "supports" the entire Amulet type system, +which includes things like type-classes and rank-N types (it's equal in +expressive power to System F). However, the hole-filling logic is meant +to aid the programmer while she codes, not exhaustively search for a +solution, so it was written to fail early and fail fast instead of +spending unbounded time searching for a solution that might not be +there. + +You can find the proof term I redacted from DeMorgan's first law [here]. 
+ +[on Github]: https://github.com/tmpim/amulet +[Stack]: https://haskellstack.org +[here]: /static/demorgan-1.ml.html diff --git a/pages/posts/2019-09-29-amc-prove-interactive.md b/pages/posts/2019-09-29-amc-prove-interactive.md new file mode 100644 index 0000000..b6aea55 --- /dev/null +++ b/pages/posts/2019-09-29-amc-prove-interactive.md @@ -0,0 +1,202 @@ +--- +title: Interactive amc-prove +date: September 29, 2019 +--- + +Following my [last post announcing amc-prove](/posts/2019-09-25.html), I +decided I'd make it more accessible to people who wouldn't like to spend +almost 10 minutes compiling Haskell code just to play with a fiddly +prover. + +So I made a web interface for the thing: Play with it below. + +Text immediately following > is editable. + + + +
+
+
+> (not P) + (not Q) -> not (P * Q)
+
+
+
+ + + + diff --git a/pages/posts/2019-10-04-amulet-kinds.md b/pages/posts/2019-10-04-amulet-kinds.md new file mode 100644 index 0000000..6b5eb7d --- /dev/null +++ b/pages/posts/2019-10-04-amulet-kinds.md @@ -0,0 +1,369 @@ +--- +title: Typed Type-Level Computation in Amulet +date: October 04, 2019 +maths: true +--- + +Amulet, as a programming language, has a focus on strong static typing. This has led us to adopt +many features inspired by dependently-typed languages, the most prominent of which being typed holes +and GADTs, the latter being an imitation of indexed families. + +However, Amulet was up until recently sorely lacking in a way to express computational content in +types: It was possible to index datatypes by other, regular datatypes ("datatype promotion", in the +Haskell lingo) since the type and kind levels are one and the same, but writing functions on those +indices was entirely impossible. + +As of this week, the language supports two complementary mechanisms for typed type-level programming: +_type classes with functional dependencies_, a form of logic programming, and _type functions_, which +permit functional programming on the type level. + +I'll introduce them in that order; This post is meant to serve as an introduction to type-level +programming using either technique in general, but it'll also present some concepts formally and with +some technical depth. + +### Type Classes are Relations: Programming with Fundeps + +In set theory[^1] a _relation_ $R$ over a family of sets $A, B, C, \dots$ is a subset of the +cartesian product $A \times B \times C \times \dots$. If $(a, b, c, \dots) \in R_{A,B,C,\dots}$ we +say that $a$, $b$ and $c$ are _related_ by $R$. + +In this context, a _functional dependency_ is a term $X \leadsto Y$ +where $X$ and $Y$ are both sets of natural numbers. A relation is said +to satisfy a functional dependency $X \leadsto Y$ when, for any tuple in +the relation, the values at $X$ uniquely determine the values at $Y$. 
+ +For instance, the relations $R_{A,B}$ satisfying $\{0\} \leadsto \{1\}$ are partial functions $A \to +B$, and if it were additionally to satisfy $\{1\} \leadsto \{0\}$ it would be a partial one-to-one +mapping. + +One might wonder what all of this abstract nonsense[^2] has to do with type classes. The thing is, a +type class `class foo : A -> B -> constraint`{.amulet} is a relation $\text{Foo}_{A,B}$! With this in +mind, it becomes easy to understand what it might mean for a type class to satisfy a functional +relation, and indeed the expressive power that they bring. + +To make it concrete: + +```amulet +class r 'a 'b (* an arbitrary relation between a and b *) +class f 'a 'b | 'a -> 'b (* a function from a to b *) +class i 'a 'b | 'a -> 'b, 'b -> 'a (* a one-to-one mapping between a and b *) +``` + +#### The Classic Example: Collections + +In Mark P. Jones' paper introducing functional dependencies, he presents as an example the class +`collects : type -> type -> constraint`{.amulet}, where `'e`{.amulet} is the type of elements in the +collection type `'ce`{.amulet}. This class can be used for all the standard, polymorphic collections +(of kind `type -> type`{.amulet}), but it also admits instances for monomorphic collections, like a +`bitset`. + +```amulet +class collects 'e 'ce begin + val empty : 'ce + val insert : 'e -> 'ce -> 'ce + val member : 'e -> 'ce -> bool +end +``` + +Omitting the standard implementation details, this class admits instances like: + +```amulet +class eq 'a => collects 'a (list 'a) +class eq 'a => collects 'a ('a -> bool) +instance collects char string (* amulet strings are not list char *) +``` + +However, Jones points out this class, as written, has a variety of problems. For starters, `empty`{.amulet} has +an ambiguous type, `forall 'e 'ce. collects 'e 'ce => 'ce`{.amulet}. 
This type is ambiguous because the type
variable `e`{.amulet} is $\forall$-bound, and appears in the constraint `collects 'e 'ce`{.amulet}, but doesn't
This is because the second parameter of `collects`{.amulet} +is enough to determine the first parameter; Since `'c`{.amulet} is obviously equal to itself, +`'a`{.amulet} must be equal to `'b`. + +We can observe improvement within the language using a pair of data types, `(:-) : constraint -> +constraint -> type`{.amulet} and `dict : constraint -> type`{.amulet}, which serve as witnesses of +implication between constraints and a single constraint respectively. + +```amulet +type dict 'c = Dict : 'c => dict 'c +type 'p :- 'q = Sub of ('p => unit -> dict 'q) + +let improve : forall 'a 'b 'c. (collects 'a 'c * collects 'b 'c) :- ('a ~ 'b) = + Sub (fun _ -> Dict) +``` + +Because this program type-checks, we can be sure that `collects 'a 'c * collects 'b 'c`{.amulet} +implies `'a`{.amulet} is equal to `'b`{.amulet}. Neat! + +### Computing with Fundeps: Natural Numbers and Vectors + +If you saw this coming, pat yourself on the back. + +I'm required by law to talk about vectors in every post about types. No, really; It's true. +I'm sure everyone's seen this by now, but vectors are cons-lists indexed by their type as a Peano +natural. + +```amulet +type nat = Z | S of nat +type vect 'n 'a = + | Nil : vect Z 'a + | Cons : 'a * vect 'n 'a -> vect (S 'n) 'a +``` + +Our running objective for this post will be to write a function to append two vectors, such that the +length of the result is the sum of the lengths of the arguments.[^3] But, how do we even write the +type of such a function? + +Here we can use a type class with functional dependencies witnessing the fact that $a + b = c$, for +some $a$, $b$, $c$ all in $\mathbb{N}$. Obviously, knowing $a$ and $b$ is enough to know $c$, and the +functional dependency expresses that. Due to the way we're going to be implementing `add`, the other +two functional dependencies aren't admissible. 
+ +```amulet +class add 'a 'b 'c | 'a 'b -> 'c begin end +``` + +Adding zero to something just results in that something, and if $a + b = c$ then $(1 + a) + b = 1 + c$. + +```amulet +instance add Z 'a 'a begin end +instance add 'a 'b 'c => add (S 'a) 'b (S 'c) begin end +``` + +With this in hands, we can write a function to append vectors. + +```amulet +let append : forall 'n 'k 'm 'a. add 'n 'k 'm + => vect 'n 'a -> vect 'k 'a -> vect 'm 'a = + fun xs ys -> + match xs with + | Nil -> ys + | Cons (x, xs) -> Cons (x, append xs ys) +``` + +Success! +... or maybe not. Amulet's complaining about our definition of `append` even though it's correct; What +gives? + +The problem is that while functional dependencies let us conclude equalities from pairs of instances, +it doesn't do us any good if there's a single instance. So we need a way to reflect the equalities in +a way that can be pattern-matched on. If your GADT senses are going off, that's a good thing. + +#### Computing with Evidence + +This is terribly boring to do and what motivated me to add type functions to Amulet in the first +place, but the solution here is to have a GADT that mirrors the structure of the class instances, and +make the instances compute that. Then, in our append function, we can match on this evidence to reveal +equalities to the type checker. + +```amulet +type add_ev 'k 'n 'm = + | AddZ : add_ev Z 'a 'a + | AddS : add_ev 'a 'b 'c -> add_ev (S 'a) 'b (S 'c) + +class add 'a 'b 'c | 'a 'b -> 'c begin + val ev : add_ev 'a 'b 'c +end + +instance add Z 'a 'a begin + let ev = AddZ +end + +instance add 'a 'b 'c => add (S 'a) 'b (S 'c) begin + let ev = AddS ev +end +``` + +Now we can write vector `append` using the `add_ev` type. 
+ +```amulet +let append' (ev : add_ev 'n 'm 'k) + (xs : vect 'n 'a) + (ys : vect 'm 'a) + : vect 'k 'a = + match ev, xs with + | AddZ, Nil -> ys + | AddS p, Cons (x, xs) -> Cons (x, append' p xs ys) +and append xs ys = append' ev xs ys +``` + +This type-checks and we're done. + +### Functions on Types: Programming with Closed Type Functions + +Look, duplicating the structure of a type class at the value level just so the compiler can figure out +equalities is stupid. Can't we make it do that work instead? Enter _closed type functions_. + +```amulet +type function (+) 'n 'm begin + Z + 'n = 'n + (S 'k) + 'n = S ('k + 'n) +end +``` + +This declaration introduces the type constructor `(+)`{.amulet} (usually written infix) and two rules +for reducing types involving saturated applications of `(+)`{.amulet}. Type functions, unlike type +classes which are defined like Prolog clauses, are defined in a pattern-matching style reminiscent of +Haskell. + +Each type function has a set of (potentially overlapping) _equations_, and the compiler will reduce an +application using an equation as soon as it's sure that equation is the only possible equation based +on the currently-known arguments. + +Using the type function `(+)`{.amulet} we can use our original implementation of `append` and have it +type-check: + +```amulet +let append (xs : vect 'n 'a) (ys : vect 'k 'a) : vect ('n + 'k) 'a = + match xs with + | Nil -> ys + | Cons (x, xs) -> Cons (x, append xs ys) +let ys = append (Cons (1, Nil)) (Cons (2, Cons (3, Nil))) +``` + +Now, a bit of a strange thing is that Amulet reduces type family applications as lazily as possible, +so that `ys` above has type `vect (S Z + S (S Z)) int`{.amulet}. In practice, this isn't an issue, as +a simple ascription shows that this type is equal to the more orthodox `vect (S (S (S Z))) +int`{.amulet}. 
+ +```amulet +let zs : vect (S (S (S Z))) int = ys +``` + +Internally, type functions do pretty much the same thing as the functional dependency + evidence +approach we used earlier. Each equation gives rise to an equality _axiom_, represented as a +constructor because our intermediate language pretty much lets constructors return whatever they damn +want. + +```amulet +type + '(n : nat) '(m : nat) = + | awp : forall 'n 'm 'r. 'n ~ Z -> 'm ~ 'n -> ('n + 'm) ~ 'n + | awq : forall 'n 'k 'm 'l. 'n ~ (S 'k) -> 'm ~ 'l + -> ('n + 'm) ~ (S ('k + 'l)) +``` + +These symbols have ugly autogenerated names because they're internal to the compiler and should never +appear to users, but you can see that `awp` and `awq` correspond to each clause of the `(+)`{.amulet} +type function, with a bit more freedom in renaming type variables. + +### Custom Type Errors: Typing Better + +Sometimes - I mean, pretty often - you have better domain knowledge than Amulet. For instance, you +might know that it's impossible to `show` a function. 
The `type_error` type family lets you tell the +type checker this: + +```amulet +instance + (type_error (String "Can't show functional type:" :<>: ShowType ('a -> 'b)) + => show ('a -> 'b) +begin + let show _ = "" +end +``` + +Now trying to use `show` on a function value will give you a nice error message: + +```amulet +let _ = show (fun x -> x + 1) +``` +``` + │ +1 │ let _ = show (fun x -> x + 1) + │ ^^^^^^^^^^^^^^^^^^^^^ + Can't show functional type: int -> int +``` + +### Type Families can Overlap + +Type families can tell when two types are equal or not: + +```amulet +type function equal 'a 'b begin + discrim 'a 'a = True + discrim 'a 'b = False +end +``` + +But overlapping equations need to agree: + +```amulet +type function overlap_not_ok 'a begin + overlap_not_ok int = string + overlap_not_ok int = int +end +``` +``` + Overlapping equations for overlap_not_ok int + • Note: first defined here, + │ +2 │ overlap_not_ok int = string + │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^ + but also defined here + │ +3 │ overlap_not_ok int = int + │ ^^^^^^^^^^^^^^^^^^^^^^^^ +``` + +### Conclusion + +Type families and type classes with functional dependencies are both ways to introduce computation in +the type system. They both have their strengths and weaknesses: Fundeps allow improvement to inferred +types, but type families interact better with GADTs (since they generate more equalities). Both are +important in language with a focus on type safety, in my opinion. + +[^1]: This is not actually the definition of a relation with full generality; Set theorists are + concerned with arbitrary families of sets indexed by some $i \in I$, where $I$ is a set of indices; + Here, we've set $I = \mathbb{N}$ and restrict ourselves to the case where relations are tuples. + +[^2]: At least it's not category theory. + +[^3]: In the shower today I actually realised that the `append` function on vectors is a witness to + the algebraic identity $a^n * a^m = a^{n + m}$. 
Think about it: the `vect 'n`{.amulet} functor is + representable by `fin 'n`{.amulet}, i.e. it is isomorphic to functions `fin 'n -> 'a`{.amulet}. By + definition, `fin 'n`{.amulet} is the type with `'n`{.amulet} elements, and arrow types `'a -> + 'b`{.amulet} have $\text{size}(b)^{\text{size}(a)}$ elements, which leads us to conclude `vect 'n + 'a` has size $\text{size}(a)^n$ elements. diff --git a/pages/posts/2019-10-19-amulet-quicklook.md b/pages/posts/2019-10-19-amulet-quicklook.md new file mode 100644 index 0000000..91584c2 --- /dev/null +++ b/pages/posts/2019-10-19-amulet-quicklook.md @@ -0,0 +1,126 @@ +--- +title: "A Quickie: A Use Case for Impredicative Polymorphism" +path: impredicative-polymorphism +date: October 19, 2019 +--- + +Amulet now (as of the 18th of October) has support for impredicative +polymorphism based on [Quick Look impredicativity], an algorithm first +proposed for GHC that treats inference of applications as a two-step +process to enable inferring impredicative types. + +As a refresher, impredicative types (in Amulet) are types in which a +`forall`{.amuçet} appears under a type constructor (that is not +`(->)`{.amulet} or `(*)`{.amulet}, since those have special variance in +the compiler). + +Quick Look impredicativity works by doing type checking of applications +in two phases: the _quick look_, which is called so because it's faster +than regular type inference, and the regular type-checking of +arguments. + +Given a `n`-ary application +f x1 ... xn: + +
    +
  1. +The _quick look_ proceeds by inferring the type of the function to +expose the first `n` quantifiers, based on the form of the arguments. +For a regular term argument `e`, we expect a `'t ->`{.amulet} quantifier; For +visible type arguments, we expect either `forall 'a.`{.amulet} or +`forall 'a ->`{.amulet}. + +After we have each of the quantifiers, we quickly infer a type for each +of the _simple_ arguments in +x1 ... xn. +Here, simple means either a +variable, literal, application or an expression annotated with a type +`x : t`{.amulet}. With this type in hands, we unify it with the type +expected by the quantifier, to collect a partial substituion (in which +unification failures are ignored), used to discover impredicative +instantiation. + +For example, say `f : 'a -> list 'a -> list 'a`{.amulet} (the cons +function)[^1], and we want to infer the application `f (fun x -> x) (Nil +@(forall 'xx. 'xx -> 'xx))`{.amulet}. Here, the quick look will inspect +each argument in turn, coming up with a `list 'a ~ list (forall 'xx. 'xx +-> 'xx)`{.amulet} equality by looking at the second argument. Since the +first argument is not simple, it tells us nothing. Thus, the second +phase starts with the substitution `'a := forall 'xx. 'xx -> +'xx`{.amulet}. +
  2. + +
  3. +The second phase is traditional type-checking of each argument in turn, +against its respective quantifier. Here we use Amulet's type-checking +function `check` instead of applying type-inference then constraining +with subsumption since that results in more precise resuls. + +However, instead of taking the quantifiers directly from the function's +inferred type, we first apply the substitution generated by the +quick-look. Thus, keeping with the example, we check the function `(fun +x -> x)`{.amulet} against the type `forall 'xx. 'xx -> 'xx`{.amulet}, +instead of checking it against the type variable `'a`{.amulet}. + +This is important because checking against a type variable degrades to +inference + subsumption, which we wanted to avoid in the first place! +Thus, if we had no quick look, the function `(fun x -> x)`{.amulet} +would be given monomorphic type `'t1 -> 't2`{.amulet} (where +`'t1'`{.amulet}, `'t2`{.amulet} are fresh unification variables), and +we'd try to unify `list ('t1 -> 't2) ~ list (forall 'xx. 'xx -> +'xx)`{.amulet} - No dice! +
  4. +
+ +### Why does this matter? + +Most papers discussing impredicative polymorphism focus on the boring, +useless example of stuffing a list with identity functions. Indeed, this +is what I demonstrated above. + +However, a much more useful example is putting _lenses_ in lists (or +`optional`{.amulet}, `either`{.amulet}, or what have you). Recall the +van Laarhoven encoding of lenses: + +```amulet +type lens 's 't 'a 'b <- forall 'f. functor 'f => ('a -> 'f 'b) -> 's -> 'f 't +``` + +If you're not a fan of that, consider also the profunctor encoding of +lenses: + +```amulet +type lens 's 't 'a 'b <- forall 'p. strong 'p => 'p 'a 'b -> 'p 's 't +``` + +These types are _both_ polymorphic, which means we can't normally have a +`list (lens _ _ _ _)`{.amulet}. This is an issue! The Haskell `lens` +library works around this by providing a `LensLike`{.haskell} type, +which is not polymorphic and takes the functor `f` by means of an +additional parameter. However, consider the difference in denotation +between + +```haskell +foo :: [Lens a a Int Int] -> a -> (Int, a) +bar :: Functor f => [LensLike f a a Int Int] -> a -> (Int, a) +``` + +The first function takes a list of lenses; It can then use these lenses +in any way it pleases. The second, however, takes a list of lens-like +values _that all use the same functor_. Thus, you can't `view` using the +head of the list and `over` using the second element! (Recall that +`view` uses the `Const`{.haskell} functor and `over`{.amulet} the +`Identity`{.amulet} functor). Indeed, the second function can't use the +lenses at all, since it must work for an arbitrary functor and not +`Const`{.haskell}/`Identity`{.haskell}. + +Of course, [Amulet lets you put lenses in lists]: See `lens_list` and +`xs` at the bottom of the file. + + +[^1]: Assume that the `'a`{.amulet} variable is bound by a `forall +'a.`{.amulet} quantifier. Since we don't use visible type application in +the following example, I just skipped mentioning it. 
+ +[Quick Look impredicativity]: https://github.com/serras/ghc-proposals/blob/quick-look/proposals/0000-quick-look-impredicativity.md +[Amulet lets you put lenses in lists]: /static/profunctor-impredicative.ml.html diff --git a/pages/posts/2020-01-31-lazy-eval.lhs b/pages/posts/2020-01-31-lazy-eval.lhs new file mode 100644 index 0000000..8663b67 --- /dev/null +++ b/pages/posts/2020-01-31-lazy-eval.lhs @@ -0,0 +1,1351 @@ +--- +title: "The G-machine In Detail, or How Lazy Evaluation Works" +date: January 31, 2020 +maths: true +--- + +\long\def\ignore#1{} + +\ignore{ +\begin{code} +{-# LANGUAGE RecordWildCards, NamedFieldPuns, CPP #-} +#if !defined(Section) +#error "You haven't specified a section to load! Re-run with -DSection=1 or -DSection=2" +#endif +#if defined(Section) && (Section != 1 && Section != 2) +#error Section "isn't a valid section to load! Re-run with -DSection=1 or -DSection=2" +#endif +\end{code} +} + + + + + + + +With Haskell now more popular than ever, a great deal of programmers +deal with lazy evaluation in their daily lives. They're aware of the +pitfalls of lazy I/O, know not to use `foldl`, and are masters at +introducing bang patterns in the right place. But very few programmers +know the magic behind lazy evaluation—graph reduction. + +This post is an abridged adaptation of Simon Peyton Jones' and David R. +Lester's book, _"Implementing Functional Languages: a tutorial."_, +itself a refinement of SPJ's previous work, 1987's _"The Implementation +of Functional Programming Languages"_. The newer book doesn't cover as +much material as the previous: it focuses mostly on the evaluation of +functional programs, and indeed that is our focus today as well. For +this, it details three abstract machines: The G-machine, the Three +Instruction Machine (affectionately called Tim), and a parallel +G-machine. + +In this post we'll take a look first at a stack-based machine for +reducing arithmetic expressions. 
Armed with the knowledge of how typical +stack machines work, we'll take a look at the G-machine, and how graph +reduction works (and where the name comes from in the first place!) + +This post is written as [a Literate Haskell source file], with Cpp +conditionals to enable/disable each section. To compile a specific +section, use GHC like this: + +```bash +ghc -XCPP -DSection=1 2020-01-09.lhs +``` + +----- + +\ignore{ +\begin{code} +{-# LANGUAGE CPP #-} +#if Section == 1 +\end{code} +} + +\begin{code} +module StackArith where +\end{code} + +Section 1: Evaluating Arithmetic with a Stack +============================================= + +Stack machines are the base for all of the computation models we're +going to explore today. To get a better feel of how they work, the first +model of computation we're going to describe is stack-based arithmetic, +better known as reverse polish notation. This machine also forms the +basis of the programming language FORTH. First, let us define a data +type for arithmetic expressions, including the four basic operators +(addition, multiplication, subtraction and division.) + +\begin{code} +data AExpr + = Lit Int + | Add AExpr AExpr + | Sub AExpr AExpr + | Mul AExpr AExpr + | Div AExpr AExpr + deriving (Eq, Show, Ord) +\end{code} + +This language has an 'obvious' denotation, which can be realised using +an interpreter function, such as `aInterpret` below. + +\begin{code} +aInterpret :: AExpr -> Int +aInterpret (Lit n) = n +aInterpret (Add e1 e2) = aInterpret e1 + aInterpret e2 +aInterpret (Sub e1 e2) = aInterpret e1 - aInterpret e2 +aInterpret (Mul e1 e2) = aInterpret e1 * aInterpret e2 +aInterpret (Div e1 e2) = aInterpret e1 `div` aInterpret e2 +\end{code} + +Alternatively, we can implement the language through its _operational_ +behaviour, by compiling it to a series of instructions that, when +executed in an appropriate machine, leave it in a _final state_ from +which we can extract the expression's result. 
+ +Our abstract machine for aritmethic will be a _stack_ based machine with +only a handful of instructions. The type of instructions is +`AInstr`{.haskell}. + +\begin{code} +data AInstr + = Push Int + | IAdd | IMul | ISub | IDiv + deriving (Eq, Show, Ord) +\end{code} + +The state of the machine is simply a pair, containing an instruction +stream and a stack of values. By our compilation scheme, the machine is +never in a state where more values are required on the stack than there +are values present; This would not be the case if we let programmers +directly write instruction streams. + +We can compile a program into a sequence of instructions recursively. + +\begin{code} +aCompile :: AExpr -> [AInstr] +aCompile (Lit i) = [Push i] +aCompile (Add e1 e2) = aCompile e1 ++ aCompile e2 ++ [IAdd] +aCompile (Mul e1 e2) = aCompile e1 ++ aCompile e2 ++ [IMul] +aCompile (Sub e1 e2) = aCompile e1 ++ aCompile e2 ++ [ISub] +aCompile (Div e1 e2) = aCompile e1 ++ aCompile e2 ++ [IDiv] +\end{code} + +And we can write a function to represent the state transition rules of +the machine. + +\begin{code} +aEval :: ([AInstr], [Int]) -> ([AInstr], [Int]) +aEval (Push i:xs, st) = (xs, i:st) +aEval (IAdd:xs, x:y:st) = (xs, (x + y):st) +aEval (IMul:xs, x:y:st) = (xs, (x * y):st) +aEval (ISub:xs, x:y:st) = (xs, (x - y):st) +aEval (IDiv:xs, x:y:st) = (xs, (x `div` y):st) +\end{code} + +A state is said to be _final_ when it has an empty instruction stream +and a single result on the stack. To run a program, we simply repeat +`aEval` until a final state is reached. + +\begin{code} +aRun :: [AInstr] -> Int +aRun is = go (is, []) where + go st | Just i <- final st = i + go st = go (aEval st) + + final ([], [n]) = Just n + final _ = Nothing +\end{code} + +A very important property linking our compiler, abstract machine and +interpreter together is that of _compiler correctness_. That is: + +```haskell +forall x. 
aRun (aCompile x) == aInterpret x +``` + +As an example, the arithmetic expression $2 + 3 \times 4$ produces the +following code sequence: + +```haskell +[Push 2,Push 3,Push 4,IMul,IAdd] +``` + +You can interactively follow the execution of this program with the tool +below. Pressing the Step button is equivalent to `aEval`. The stack is +drawn in boxes to the left, and the instruction sequence is presented on +the right, where the `>` marks the currently executing instruction (the +"program counter", if you will). + + + +
+
+
+
+
+ + +
+
+
+
+
+ + + + +
0
+
+
+
+{-# LANGUAGE LambdaCase #-}
+-- | A minimal monadic parser-combinator library.  A parser is a function
+-- from the input 'String' to either an error message ('Left') or a result
+-- paired with the unconsumed input ('Right').  'Control.Applicative' and
+-- 'Control.Monad' are re-exported (as @X@) so callers get combinators
+-- such as 'many', 'some' and '<|>' for free.
+module Parser
+  ( Parser()
+  , module X
+  , Parser.any
+  , satisfy
+  , string
+  , digit
+  , number
+  , spaces
+  , reserved
+  , lexeme
+  , (<?>)
+  , runParser
+  , between ) where
+
+import Control.Applicative as X
+import Control.Monad as X
+
+import Data.Char
+
+-- | The parser type: consumes a prefix of the input and either fails with
+-- an error message or yields a value plus the remaining input.
+newtype Parser a
+  = Parser { parse :: String -> Either String (a, String) }
+
+-- | Run a parser over an input string, keeping only the result (or the
+-- error) and discarding whatever input was left unconsumed.
+runParser :: Parser a -> String -> Either String a
+runParser (Parser p) s = fst <$> p s
+
+-- | @p \<?\> err@ behaves like @p@, but on failure reports an error built
+-- from @err@ (via 'fail') instead of @p@'s own message.
+(<?>) :: Parser a -> String -> Parser a
+p <?> err = p <|> fail err
+infixl 2 <?>
+
+-- | Mapping transforms only the parse result; the leftover input is
+-- passed through untouched and failures propagate unchanged.
+instance Functor Parser where
+  fn `fmap` (Parser p) = Parser go where
+    go st = case p st of
+      Left e            -> Left e
+      Right (res, str') -> Right (fn res, str')
+
+-- | 'pure' consumes no input; '<*>' runs the function parser first and
+-- then the argument parser on the leftover input, threading the stream
+-- left to right.
+instance Applicative Parser where
+  pure x = Parser $ \str -> Right (x, str)
+  (Parser p) <*> (Parser p') = Parser go where
+    go st = case p st of
+      Left e -> Left e
+      Right (fn, st') -> case p' st' of
+        Left e' -> Left e'
+        Right (v, st'') -> Right (fn v, st'')
+
+-- | '<|>' tries the left parser and, if it fails, runs the right parser
+-- on the *original* input (full backtracking; no input is committed).
+-- 'empty' is the parser that always fails.
+instance Alternative Parser where
+  empty = fail "nothing"
+  (Parser p) <|> (Parser p') = Parser go where
+    go st = case p st of
+      Left _  -> p' st
+      Right x -> Right x
+
+-- | '>>=' sequences parsers, feeding the first parser's leftover input
+-- into the parser chosen by the continuation.
+-- NOTE(review): defining 'fail' inside 'Monad' requires a pre-8.8 GHC,
+-- where 'fail' was still a Monad method rather than part of 'MonadFail'.
+instance Monad Parser where
+  return = pure
+  (Parser p) >>= f = Parser go where
+    go s = case p s of
+      Left e -> Left e
+      Right (x, s') -> parse (f x) s'
+  -- Builds an "expected X, got Y" message from the supplied description
+  -- and the first character of the remaining input (or end-of-stream).
+  fail m = Parser go where
+    go = Left . go'
+    go' []     = "expected " ++ m ++ ", got to the end of stream"
+    go' (x:xs) = "expected " ++ m ++ ", got '" ++ x:"'"
+
+
+-- | Consume and return the next character; fails at end of input.
+any :: Parser Char
+any = Parser go where
+  go []     = Left "any: end of file"
+  go (x:xs) = Right (x,xs)
+
+-- | Consume one character that satisfies the predicate.  The generic
+-- failure message here is normally replaced by callers via '<?>'.
+satisfy :: (Char -> Bool) -> Parser Char
+satisfy f = do x <- Parser.any
+               if f x
+                 then return x
+                 else fail "a solution to the function"
+
+
+-- | Match one specific character, reporting it literally on failure.
+char :: Char -> Parser Char
+char c = satisfy (c ==) <?> "literal " ++ [c]
+
+-- | Match any single character that appears in the given string.
+oneOf :: String -> Parser Char
+oneOf s = satisfy (`elem` s) <?> "one of '" ++ s ++ "'"
+
+-- | Match an exact literal string, character by character.
+string :: String -> Parser String
+string [] = return []
+string (x:xs) = do char   x
+                   string xs
+                   return $ x:xs
+
+-- | Parse one or more digits as an 'Integer'.  'read' cannot fail here
+-- because the consumed characters are guaranteed to all be digits.
+natural :: Parser Integer
+natural = read <$> some (satisfy isDigit)
+
+-- | Run a parser and then skip any trailing whitespace (see 'spaces').
+lexeme :: Parser a -> Parser a
+lexeme = (<* spaces)
+
+-- | Match a literal keyword and skip the whitespace that follows it.
+reserved :: String -> Parser String
+reserved = lexeme . string
+
+-- | Skip zero or more spaces, newlines and carriage returns.
+-- (Note: tabs are not included in this set.)
+spaces :: Parser String
+spaces = many $ oneOf " \n\r"
+
+-- | Match a single decimal digit.
+digit :: Parser Char
+digit = satisfy isDigit
+
+-- | Parse an integer literal with an optional leading minus sign.
+number :: Parser Int
+number = do
+  -- The sign must be optional.  The previous 'string "-" <|> empty'
+  -- could never take its second branch ('empty' is the always-failing
+  -- parser, defined as 'fail "nothing"'), so every unsigned number was
+  -- rejected.  Falling back to the empty string fixes that.
+  s <- string "-" <|> return ""
+  cs <- some digit
+  return $ read (s ++ cs)
+
+-- | @between open close p@ runs @open@, then @p@, then @close@, keeping
+-- only @p@'s result.
+between :: Parser b -> Parser c -> Parser a -> Parser a
+between o c x = o *> x <* c
+
+-- | Run a parser with surrounding whitespace skipped on both sides.
+contents :: Parser a -> Parser a
+contents x = spaces *> x <* spaces
+
+-- | Zero or more occurrences of @c@ separated by @s@.
+sep :: Parser b -> Parser a -> Parser [a]
+sep s c = sep1 s c <|> return []
+
+-- | One or more occurrences of @c@ separated by @s@.
+sep1 :: Parser b -> Parser a -> Parser [a]
+sep1 s c = do x <- c
+              xs <- many $ s >> c
+              return $ x:xs
+
+-- | Try a parser, yielding the given default value if it fails.
+option :: a -> Parser a -> Parser a
+option x p = p <|> return x
+
+-- | Try a parser, yielding 'Just' its result on success and 'Nothing'
+-- on failure (never fails itself).
+optionMaybe :: Parser a -> Parser (Maybe a)
+optionMaybe p = option Nothing $ Just <$> p
+
+-- | Try a parser purely for its effect on the input, discarding its
+-- result; succeeds whether or not the parser matched.
+optional :: Parser a -> Parser ()
+optional p = void p <|> return ()
+
+-- | Succeed only at the end of the input stream.
+eof :: Parser ()
+eof = Parser go where
+  go (x:_) = Left $ "expected eof, got '" ++ x:"'"
+  go []    = Right ((), [])
+
+ + + diff --git a/static/demorgan-1.ml.html b/static/demorgan-1.ml.html new file mode 100644 index 0000000..cae9d35 --- /dev/null +++ b/static/demorgan-1.ml.html @@ -0,0 +1,100 @@ + + + + +~/Projects/blag/demorgan-1.ml.html + + + + + + + + + + + +
0
+
+
+
+Equiv (fun (Not g) -> (Not (fun b -> g (L b)), Not (fun c -> g (R c))), fun ((Not h), (Not g)) -> Not function
+  | (L y) -> h y
+  | (R a) -> h (g a))
+
+ + + diff --git a/static/doom.svg b/static/doom.svg new file mode 100644 index 0000000..e8b4d43 --- /dev/null +++ b/static/doom.svg @@ -0,0 +1,3024 @@ + + + + + + +%3 + + +N0 + +if + + + +N1 + +mul + + + +N2 + +sub + + + +N3 + +equ + + + +N4 + +fac + + + +N4->N0 + + + + + +N4->N1 + + + + + +N4->N2 + + + + + +N4->N3 + + + + + +N4->N4 + + + + + +N5 + +main + + + +N5->N4 + + + + + +N6 + +10 + + + +N7 + +@ + + + +N7->N4 + + + + + +N7->N6 + + + + + +N8 + +@ + + + +N8->N3 + + + + + +N8->N6 + + + + + +N9 + +0 + + + +N10 + +@ + + + +N10->N8 + + + + + +N10->N9 + + + + + +N11 + +@ + + + +N11->N0 + + + + + +N11->N10 + + + + + +N12 + +1 + + + +N13 + +@ + + + +N13->N11 + + + + + +N13->N12 + + + + + +N14 + +@ + + + +N14->N1 + + + + + +N14->N6 + + + + + +N15 + +@ + + + +N15->N2 + + + + + +N15->N6 + + + + + +N16 + +1 + + + +N17 + +@ + + + +N17->N15 + + + + + +N17->N16 + + + + + +N18 + +@ + + + +N18->N4 + + + + + +N18->N17 + + + + + +N19 + +@ + + + +N19->N14 + + + + + +N19->N18 + + + + + +N20 + +@ + + + +N20->N13 + + + + + +N20->N19 + + + + + +N21 + +1 + + + +N22 + +@ + + + +N22->N3 + + + + + +N22->N17 + + + + + +N23 + +0 + + + +N24 + +@ + + + +N24->N22 + + + + + +N24->N23 + + + + + +N25 + +@ + + + +N25->N0 + + + + + +N25->N24 + + + + + +N26 + +1 + + + +N27 + +@ + + + +N27->N25 + + + + + +N27->N26 + + + + + +N28 + +@ + + + +N28->N1 + + + + + +N28->N17 + + + + + +N29 + +@ + + + +N29->N2 + + + + + +N29->N17 + + + + + +N30 + +1 + + + +N31 + +@ + + + +N31->N29 + + + + + +N31->N30 + + + + + +N32 + +@ + + + +N32->N4 + + + + + +N32->N31 + + + + + +N33 + +@ + + + +N33->N28 + + + + + +N33->N32 + + + + + +N34 + +@ + + + +N34->N27 + + + + + +N34->N33 + + + + + +N35 + +9 + + + +N36 + +1 + + + +N37 + +9 + + + +N38 + +@ + + + +N38->N3 + + + + + +N38->N31 + + + + + +N39 + +0 + + + +N40 + +@ + + + +N40->N38 + + + + + +N40->N39 + + + + + +N41 + +@ + + + +N41->N0 + + + + + +N41->N40 + + + + + +N42 + +1 + + + +N43 + +@ + + + +N43->N41 + + + + + +N43->N42 + + + + + +N44 + +@ + + + +N44->N1 + + + + + 
+N44->N31 + + + + + +N45 + +@ + + + +N45->N2 + + + + + +N45->N31 + + + + + +N46 + +1 + + + +N47 + +@ + + + +N47->N45 + + + + + +N47->N46 + + + + + +N48 + +@ + + + +N48->N4 + + + + + +N48->N47 + + + + + +N49 + +@ + + + +N49->N44 + + + + + +N49->N48 + + + + + +N50 + +@ + + + +N50->N43 + + + + + +N50->N49 + + + + + +N51 + +9 + + + +N52 + +8 + + + +N53 + +1 + + + +N54 + +9 + + + +N55 + +8 + + + +N56 + +@ + + + +N56->N3 + + + + + +N56->N47 + + + + + +N57 + +0 + + + +N58 + +@ + + + +N58->N56 + + + + + +N58->N57 + + + + + +N59 + +@ + + + +N59->N0 + + + + + +N59->N58 + + + + + +N60 + +1 + + + +N61 + +@ + + + +N61->N59 + + + + + +N61->N60 + + + + + +N62 + +@ + + + +N62->N1 + + + + + +N62->N47 + + + + + +N63 + +@ + + + +N63->N2 + + + + + +N63->N47 + + + + + +N64 + +1 + + + +N65 + +@ + + + +N65->N63 + + + + + +N65->N64 + + + + + +N66 + +@ + + + +N66->N4 + + + + + +N66->N65 + + + + + +N67 + +@ + + + +N67->N62 + + + + + +N67->N66 + + + + + +N68 + +@ + + + +N68->N61 + + + + + +N68->N67 + + + + + +N69 + +9 + + + +N70 + +8 + + + +N71 + +7 + + + +N72 + +1 + + + +N73 + +9 + + + +N74 + +8 + + + +N75 + +7 + + + +N76 + +@ + + + +N76->N3 + + + + + +N76->N65 + + + + + +N77 + +0 + + + +N78 + +@ + + + +N78->N76 + + + + + +N78->N77 + + + + + +N79 + +@ + + + +N79->N0 + + + + + +N79->N78 + + + + + +N80 + +1 + + + +N81 + +@ + + + +N81->N79 + + + + + +N81->N80 + + + + + +N82 + +@ + + + +N82->N1 + + + + + +N82->N65 + + + + + +N83 + +@ + + + +N83->N2 + + + + + +N83->N65 + + + + + +N84 + +1 + + + +N85 + +@ + + + +N85->N83 + + + + + +N85->N84 + + + + + +N86 + +@ + + + +N86->N4 + + + + + +N86->N85 + + + + + +N87 + +@ + + + +N87->N82 + + + + + +N87->N86 + + + + + +N88 + +@ + + + +N88->N81 + + + + + +N88->N87 + + + + + +N89 + +9 + + + +N90 + +8 + + + +N91 + +7 + + + +N92 + +6 + + + +N93 + +1 + + + +N94 + +9 + + + +N95 + +8 + + + +N96 + +7 + + + +N97 + +6 + + + +N98 + +@ + + + +N98->N3 + + + + + +N98->N85 + + + + + +N99 + +0 + + + +N100 + +@ + + + +N100->N98 + + + + + +N100->N99 + + + + + +N101 + +@ + 
+ + +N101->N0 + + + + + +N101->N100 + + + + + +N102 + +1 + + + +N103 + +@ + + + +N103->N101 + + + + + +N103->N102 + + + + + +N104 + +@ + + + +N104->N1 + + + + + +N104->N85 + + + + + +N105 + +@ + + + +N105->N2 + + + + + +N105->N85 + + + + + +N106 + +1 + + + +N107 + +@ + + + +N107->N105 + + + + + +N107->N106 + + + + + +N108 + +@ + + + +N108->N4 + + + + + +N108->N107 + + + + + +N109 + +@ + + + +N109->N104 + + + + + +N109->N108 + + + + + +N110 + +@ + + + +N110->N103 + + + + + +N110->N109 + + + + + +N111 + +9 + + + +N112 + +8 + + + +N113 + +7 + + + +N114 + +6 + + + +N115 + +5 + + + +N116 + +1 + + + +N117 + +9 + + + +N118 + +8 + + + +N119 + +7 + + + +N120 + +6 + + + +N121 + +5 + + + +N122 + +@ + + + +N122->N3 + + + + + +N122->N107 + + + + + +N123 + +0 + + + +N124 + +@ + + + +N124->N122 + + + + + +N124->N123 + + + + + +N125 + +@ + + + +N125->N0 + + + + + +N125->N124 + + + + + +N126 + +1 + + + +N127 + +@ + + + +N127->N125 + + + + + +N127->N126 + + + + + +N128 + +@ + + + +N128->N1 + + + + + +N128->N107 + + + + + +N129 + +@ + + + +N129->N2 + + + + + +N129->N107 + + + + + +N130 + +1 + + + +N131 + +@ + + + +N131->N129 + + + + + +N131->N130 + + + + + +N132 + +@ + + + +N132->N4 + + + + + +N132->N131 + + + + + +N133 + +@ + + + +N133->N128 + + + + + +N133->N132 + + + + + +N134 + +@ + + + +N134->N127 + + + + + +N134->N133 + + + + + +N135 + +9 + + + +N136 + +8 + + + +N137 + +7 + + + +N138 + +6 + + + +N139 + +5 + + + +N140 + +4 + + + +N141 + +1 + + + +N142 + +9 + + + +N143 + +8 + + + +N144 + +7 + + + +N145 + +6 + + + +N146 + +5 + + + +N147 + +4 + + + +N148 + +@ + + + +N148->N3 + + + + + +N148->N131 + + + + + +N149 + +0 + + + +N150 + +@ + + + +N150->N148 + + + + + +N150->N149 + + + + + +N151 + +@ + + + +N151->N0 + + + + + +N151->N150 + + + + + +N152 + +1 + + + +N153 + +@ + + + +N153->N151 + + + + + +N153->N152 + + + + + +N154 + +@ + + + +N154->N1 + + + + + +N154->N131 + + + + + +N155 + +@ + + + +N155->N2 + + + + + +N155->N131 + + + + + +N156 + +1 + + + +N157 + +@ + + + +N157->N155 + + 
+ + + +N157->N156 + + + + + +N158 + +@ + + + +N158->N4 + + + + + +N158->N157 + + + + + +N159 + +@ + + + +N159->N154 + + + + + +N159->N158 + + + + + +N160 + +@ + + + +N160->N153 + + + + + +N160->N159 + + + + + +N161 + +9 + + + +N162 + +8 + + + +N163 + +7 + + + +N164 + +6 + + + +N165 + +5 + + + +N166 + +4 + + + +N167 + +3 + + + +N168 + +1 + + + +N169 + +9 + + + +N170 + +8 + + + +N171 + +7 + + + +N172 + +6 + + + +N173 + +5 + + + +N174 + +4 + + + +N175 + +3 + + + +N176 + +@ + + + +N176->N3 + + + + + +N176->N157 + + + + + +N177 + +0 + + + +N178 + +@ + + + +N178->N176 + + + + + +N178->N177 + + + + + +N179 + +@ + + + +N179->N0 + + + + + +N179->N178 + + + + + +N180 + +1 + + + +N181 + +@ + + + +N181->N179 + + + + + +N181->N180 + + + + + +N182 + +@ + + + +N182->N1 + + + + + +N182->N157 + + + + + +N183 + +@ + + + +N183->N2 + + + + + +N183->N157 + + + + + +N184 + +1 + + + +N185 + +@ + + + +N185->N183 + + + + + +N185->N184 + + + + + +N186 + +@ + + + +N186->N4 + + + + + +N186->N185 + + + + + +N187 + +@ + + + +N187->N182 + + + + + +N187->N186 + + + + + +N188 + +@ + + + +N188->N181 + + + + + +N188->N187 + + + + + +N189 + +9 + + + +N190 + +8 + + + +N191 + +7 + + + +N192 + +6 + + + +N193 + +5 + + + +N194 + +4 + + + +N195 + +3 + + + +N196 + +2 + + + +N197 + +1 + + + +N198 + +9 + + + +N199 + +8 + + + +N200 + +7 + + + +N201 + +6 + + + +N202 + +5 + + + +N203 + +4 + + + +N204 + +3 + + + +N205 + +2 + + + +N206 + +@ + + + +N206->N3 + + + + + +N206->N185 + + + + + +N207 + +0 + + + +N208 + +@ + + + +N208->N206 + + + + + +N208->N207 + + + + + +N209 + +@ + + + +N209->N0 + + + + + +N209->N208 + + + + + +N210 + +1 + + + +N211 + +@ + + + +N211->N209 + + + + + +N211->N210 + + + + + +N212 + +@ + + + +N212->N1 + + + + + +N212->N185 + + + + + +N213 + +@ + + + +N213->N2 + + + + + +N213->N185 + + + + + +N214 + +1 + + + +N215 + +@ + + + +N215->N213 + + + + + +N215->N214 + + + + + +N216 + +@ + + + +N216->N4 + + + + + +N216->N215 + + + + + +N217 + +@ + + + +N217->N212 + + + + + +N217->N216 + + + + + +N218 
+ +@ + + + +N218->N211 + + + + + +N218->N217 + + + + + +N219 + +9 + + + +N220 + +8 + + + +N221 + +7 + + + +N222 + +6 + + + +N223 + +5 + + + +N224 + +4 + + + +N225 + +3 + + + +N226 + +2 + + + +N227 + +1 + + + +N228 + +1 + + + +N229 + +9 + + + +N230 + +8 + + + +N231 + +7 + + + +N232 + +6 + + + +N233 + +5 + + + +N234 + +4 + + + +N235 + +3 + + + +N236 + +2 + + + +N237 + +1 + + + +N238 + +@ + + + +N238->N3 + + + + + +N238->N215 + + + + + +N239 + +0 + + + +N240 + +@ + + + +N240->N238 + + + + + +N240->N239 + + + + + +N241 + +@ + + + +N241->N0 + + + + + +N241->N240 + + + + + +N242 + +1 + + + +N243 + +@ + + + +N243->N241 + + + + + +N243->N242 + + + + + +N244 + +@ + + + +N244->N1 + + + + + +N244->N215 + + + + + +N245 + +@ + + + +N245->N2 + + + + + +N245->N215 + + + + + +N246 + +1 + + + +N247 + +@ + + + +N247->N245 + + + + + +N247->N246 + + + + + +N248 + +@ + + + +N248->N4 + + + + + +N248->N247 + + + + + +N249 + +@ + + + +N249->N244 + + + + + +N249->N248 + + + + + +N250 + +@ + + + +N250->N243 + + + + + +N250->N249 + + + + + +N251 + +9 + + + +N252 + +8 + + + +N253 + +7 + + + +N254 + +6 + + + +N255 + +5 + + + +N256 + +4 + + + +N257 + +3 + + + +N258 + +2 + + + +N259 + +1 + + + +N260 + +0 + + + +N261 + +0 + + + +N262 + +1 + + + +N263 + +2 + + + +N264 + +6 + + + +N265 + +24 + + + +N266 + +120 + + + +N267 + +720 + + + +N268 + +5040 + + + +N269 + +40320 + + + +N270 + +362880 + + + +N271 + +3628800 + + + +stack + +stack + + + +stack->N271 + + + + + diff --git a/static/forth_machine.js b/static/forth_machine.js new file mode 100644 index 0000000..be6b955 --- /dev/null +++ b/static/forth_machine.js @@ -0,0 +1,119 @@ +let draw = SVG().addTo('#forth').size('100%', '100%') + +let stack_element = (container, text) => { + let group = container.group() + group.add( + container.rect() + .size(100, 25) + .stroke('#000').fill('#ddd') + .attr('stroke-width', 2)); + group.add(container.text(text).dmove((65 - text.length) / 2, -2)); + console.log(group); + return group; +} + +let the_code = [ + [ 
'push', 2 ], + [ 'push', 3 ], + [ 'push', 4 ], + [ 'mul' ], + [ 'add' ] +] + +let the_stack = [], pc = 0, final = false; +let stack_container = draw.nested().move(draw.width() - '10%', 0) +let code_node = document.getElementById('code'); + +let push_val = (int) => { + let the_element = + stack_element(stack_container, int.toString()).move(10, 0); + the_element.animate(100, 0, 'now').move(10, 10); + the_stack.forEach(elem => elem.svg.animate(100, 0, 'now').dy(25)); + the_stack.push({ svg: the_element, val: int }); +} + +let pop_val = () => { + let item = the_stack.pop() + item.svg.remove(); + the_stack.forEach(elem => elem.svg.dy(-25)); + return item.val; +} + +let render_code = (code, pc) => { + while (code_node.firstChild) { + code_node.removeChild(code_node.firstChild); + } + let list = document.createElement('ul'); + list.style = 'list-style-type: none;'; + code.forEach((instruction, idx) => { + let i_type = instruction[0]; + let li = document.createElement('li'); + + if (idx == pc) { + let cursor = document.createElement('span') + cursor.innerText = '> '; + cursor.classList.add('instruction-cursor'); + li.appendChild(cursor); + } + + let type_field = document.createElement('span'); + type_field.innerText = i_type; + type_field.classList.add('instruction'); + li.appendChild(type_field); + for (let i = 1; i < instruction.length; i++) { + li.append(' '); + let operand_field = document.createElement('span'); + operand_field.innerText = instruction[i]; + operand_field.classList.add('operand'); + li.appendChild(operand_field); + } + list.appendChild(li); + }); + code_node.appendChild(list); +}; + +let reset = () => { + the_stack.forEach(e => e.svg.remove()); + the_stack = []; + pc = 0; + final = false; + document.getElementById('step').disabled = false; + render_code(the_code, 0); +} + +let step = () => { + if (!final) { + const insn = the_code[pc++]; + switch (insn[0]) { + case 'push': + push_val(insn[1]); + break; + case 'add': + if (the_stack.length < 2) { + 
console.error("machine error"); + document.getElementById('step').disabled = true; + } else { + let x = pop_val(), y = pop_val(); + push_val(x + y); + } + break; + case 'mul': + if (the_stack.length < 2) { + console.error("machine error"); + document.getElementById('step').disabled = true; + } else { + let x = pop_val(), y = pop_val(); + push_val(x * y); + } + break; + } + } + render_code(the_code, pc); + if (pc >= the_code.length) { + console.log("final state"); + document.getElementById('step').disabled = true; + final = true; + } +} + +render_code(the_code, pc); diff --git a/static/generated_code.lua b/static/generated_code.lua new file mode 100644 index 0000000..c43bd46 --- /dev/null +++ b/static/generated_code.lua @@ -0,0 +1,22 @@ +eq_3f_1 = setmetatable1(({["lookup"]=({})}), ({["__call"]=(function(temp_this, x, y) + local temp_method + local temp = temp_this["lookup"] + if temp then + local temp1 = temp[type1(x)] + if temp1 then + temp_method = temp1[type1(y)] or nil + else + temp_method = nil + end + else + temp_method = nil + end + if not temp_method then + if temp_this["default"] then + temp_method = temp_this["default"] + else + error1("No matching method to call for " .. (type1(x) .. " ") .. (type1(y) .. " ") .. "\nthere are methods to call for " .. keys1(temp_this["lookup"])) + end + end + return temp_method(x, y) +end)})) diff --git a/static/generated_code.lua.html b/static/generated_code.lua.html new file mode 100644 index 0000000..ff6791d --- /dev/null +++ b/static/generated_code.lua.html @@ -0,0 +1,124 @@ + + + + +~/Projects/blag/static/generated_code.lua.html + + + + + + + + + + + +
0
+
+
+
+eq_3f_1 = setmetatable1(({["lookup"]=({})}), ({["__call"]=(function(temp_this, x, y)
+  local temp_method
+  local temp = temp_this["lookup"]
+  if temp then
+    local temp1 = temp[type1(x)]
+    if temp1 then
+      temp_method = temp1[type1(y)] or nil
+    else
+      temp_method = nil
+    end
+  else
+    temp_method = nil
+  end
+  if not temp_method then
+    if temp_this["default"] then
+      temp_method = temp_this["default"]
+    else
+      error1("No matching method to call for " .. (type1(x) .. " ") .. (type1(y) .. " ") .. "\nthere are methods to call for " .. keys1(temp_this["lookup"]))
+    end
+  end
+  return temp_method(x, y)
+end)}))
+
+ + + diff --git a/static/icon/android-chrome-192x192.png b/static/icon/android-chrome-192x192.png new file mode 100644 index 0000000..f91e336 Binary files /dev/null and b/static/icon/android-chrome-192x192.png differ diff --git a/static/icon/android-chrome-512x512.png b/static/icon/android-chrome-512x512.png new file mode 100644 index 0000000..39c6c42 Binary files /dev/null and b/static/icon/android-chrome-512x512.png differ diff --git a/static/icon/apple-touch-icon.png b/static/icon/apple-touch-icon.png new file mode 100644 index 0000000..f7be2ef Binary files /dev/null and b/static/icon/apple-touch-icon.png differ diff --git a/static/icon/cube-128x.png b/static/icon/cube-128x.png new file mode 100644 index 0000000..1365f59 Binary files /dev/null and b/static/icon/cube-128x.png differ diff --git a/static/icon/favicon-16x16.png b/static/icon/favicon-16x16.png new file mode 100644 index 0000000..54fb1cc Binary files /dev/null and b/static/icon/favicon-16x16.png differ diff --git a/static/icon/favicon-32x32.png b/static/icon/favicon-32x32.png new file mode 100644 index 0000000..9b522a3 Binary files /dev/null and b/static/icon/favicon-32x32.png differ diff --git a/static/icon/favicon.ico b/static/icon/favicon.ico new file mode 100644 index 0000000..f9cba75 Binary files /dev/null and b/static/icon/favicon.ico differ diff --git a/static/icon/pfp.jpg b/static/icon/pfp.jpg new file mode 100644 index 0000000..a3cf90e Binary files /dev/null and b/static/icon/pfp.jpg differ diff --git a/static/icon/pfp.png b/static/icon/pfp.png new file mode 100644 index 0000000..db9ec9b Binary files /dev/null and b/static/icon/pfp.png differ diff --git a/static/icon/pfp@128.png b/static/icon/pfp@128.png new file mode 100644 index 0000000..d7d8088 Binary files /dev/null and b/static/icon/pfp@128.png differ diff --git a/static/icon/pfp@256.png b/static/icon/pfp@256.png new file mode 100644 index 0000000..877b699 Binary files /dev/null and b/static/icon/pfp@256.png differ diff --git 
a/static/icon/pfp@512.png b/static/icon/pfp@512.png new file mode 100644 index 0000000..5c1fd64 Binary files /dev/null and b/static/icon/pfp@512.png differ diff --git a/static/icon/pfp@96.png b/static/icon/pfp@96.png new file mode 100644 index 0000000..cde9ebc Binary files /dev/null and b/static/icon/pfp@96.png differ diff --git a/static/licenses/LICENSE.Iosevka b/static/licenses/LICENSE.Iosevka new file mode 100644 index 0000000..b13467d --- /dev/null +++ b/static/licenses/LICENSE.Iosevka @@ -0,0 +1,110 @@ +Copyright 2015-2021, Renzhi Li (aka. Belleve Invis, belleve@typeof.net) + +This Font Software is licensed under the SIL Open Font License, Version 1.1. + +This license is copied below, and is also available with a FAQ at: +http://scripts.sil.org/OFL + +-------------------------- + + +SIL Open Font License v1.1 +==================================================== + + +Preamble +---------- + +The goals of the Open Font License (OFL) are to stimulate worldwide +development of collaborative font projects, to support the font creation +efforts of academic and linguistic communities, and to provide a free and +open framework in which fonts may be shared and improved in partnership +with others. + +The OFL allows the licensed fonts to be used, studied, modified and +redistributed freely as long as they are not sold by themselves. The +fonts, including any derivative works, can be bundled, embedded, +redistributed and/or sold with any software provided that any reserved +names are not used by derivative works. The fonts and derivatives, +however, cannot be released under any other type of license. The +requirement for fonts to remain under this license does not apply +to any document created using the fonts or their derivatives. + + +Definitions +------------- + +`"Font Software"` refers to the set of files released by the Copyright +Holder(s) under this license and clearly marked as such. This may +include source files, build scripts and documentation. 
+ +`"Reserved Font Name"` refers to any names specified as such after the +copyright statement(s). + +`"Original Version"` refers to the collection of Font Software components as +distributed by the Copyright Holder(s). + +`"Modified Version"` refers to any derivative made by adding to, deleting, +or substituting -- in part or in whole -- any of the components of the +Original Version, by changing formats or by porting the Font Software to a +new environment. + +`"Author"` refers to any designer, engineer, programmer, technical +writer or other person who contributed to the Font Software. + + +Permission & Conditions +------------------------ + +Permission is hereby granted, free of charge, to any person obtaining +a copy of the Font Software, to use, study, copy, merge, embed, modify, +redistribute, and sell modified and unmodified copies of the Font +Software, subject to the following conditions: + +1. Neither the Font Software nor any of its individual components, + in Original or Modified Versions, may be sold by itself. + +2. Original or Modified Versions of the Font Software may be bundled, + redistributed and/or sold with any software, provided that each copy + contains the above copyright notice and this license. These can be + included either as stand-alone text files, human-readable headers or + in the appropriate machine-readable metadata fields within text or + binary files as long as those fields can be easily viewed by the user. + +3. No Modified Version of the Font Software may use the Reserved Font + Name(s) unless explicit written permission is granted by the corresponding + Copyright Holder. This restriction only applies to the primary font name as + presented to the users. + +4. 
The name(s) of the Copyright Holder(s) or the Author(s) of the Font + Software shall not be used to promote, endorse or advertise any + Modified Version, except to acknowledge the contribution(s) of the + Copyright Holder(s) and the Author(s) or with their explicit written + permission. + +5. The Font Software, modified or unmodified, in part or in whole, + must be distributed entirely under this license, and must not be + distributed under any other license. The requirement for fonts to + remain under this license does not apply to any document created + using the Font Software. + + + +Termination +----------- + +This license becomes null and void if any of the above conditions are +not met. + + + DISCLAIMER + + THE FONT SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, + EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO ANY WARRANTIES OF + MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT + OF COPYRIGHT, PATENT, TRADEMARK, OR OTHER RIGHT. IN NO EVENT SHALL THE + COPYRIGHT HOLDER BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, + INCLUDING ANY GENERAL, SPECIAL, INDIRECT, INCIDENTAL, OR CONSEQUENTIAL + DAMAGES, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + FROM, OUT OF THE USE OR INABILITY TO USE THE FONT SOFTWARE OR FROM + OTHER DEALINGS IN THE FONT SOFTWARE. 
diff --git a/static/licenses/LICENSE.KaTeX b/static/licenses/LICENSE.KaTeX new file mode 100644 index 0000000..e2ae27d --- /dev/null +++ b/static/licenses/LICENSE.KaTeX @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2013-2018 Khan Academy + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/static/licenses/LICENSE.Noto b/static/licenses/LICENSE.Noto new file mode 100644 index 0000000..bf911cf --- /dev/null +++ b/static/licenses/LICENSE.Noto @@ -0,0 +1,200 @@ + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. 
+ + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. 
+ + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of 
the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. 
Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. 
+ + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/static/not-doom.svg b/static/not-doom.svg new file mode 100644 index 0000000..1667564 --- /dev/null +++ b/static/not-doom.svg @@ -0,0 +1,31 @@ + + + + + + +G + + + +stack + +stack + + + +3628800 + +3628800 + + + +stack->3628800 + + + + + diff --git a/static/profunctor-impredicative.ml.html b/static/profunctor-impredicative.ml.html new file mode 100644 index 0000000..bc9e465 --- /dev/null +++ b/static/profunctor-impredicative.ml.html @@ -0,0 +1,215 @@ + + + + +~/Projects/amulet/profunctor-impredicative.ml.html + + + + + + + + + + + +
0
+
+
+
+external val print : 'a -> unit = "print"
+
+let id x = x
+let f & g = fun x -> f (g x)
+let const x _ = x
+let x |> f = f x
+
+let fst (x, _) = x
+let snd (_, x) = x
+let uncurry f (x, y) = f x y
+
+let fork f g x = (f x, g x)
+
+class profunctor 'p begin
+  val dimap : forall 'a 'b 'c 'd. ('b -> 'a) -> ('c -> 'd)
+              -> 'p 'a 'c -> 'p 'b 'd
+end
+
+let lmap g = dimap g id
+let rmap x = dimap id x
+
+class profunctor 'p => strong 'p begin
+  val first : forall 'a 'b 'c. 'p 'a 'b -> 'p ('a * 'c) ('b * 'c)
+  val second : forall 'a 'b 'c. 'p 'a 'b -> 'p ('c * 'a) ('c * 'b)
+end
+
+type either 'l 'r = Left of 'l | Right of 'r
+
+let either f g = function
+  | Left x -> f x
+  | Right y -> g y
+
+class profunctor 'p => choice 'p begin
+  val left : forall 'a 'b 'c. 'p 'a 'b
+              -> 'p (either 'a 'c) (either 'b 'c)
+  val right : forall 'a 'b 'c. 'p 'a 'b
+              -> 'p (either 'c 'a) (either 'c 'b)
+end
+
+class monoid 'm begin
+  val (<>) : 'm -> 'm -> 'm
+  val zero : 'm
+end
+
+type forget 'r 'a 'b = Forget of 'a -> 'r
+let remember (Forget r) = r
+
+instance profunctor (->)
+  let dimap f g h = g & h & f
+
+instance strong (->)
+  let first f (x, y) = (f x, y)
+  let second f (x, y) = (x, f y)
+
+instance choice (->)
+  let left f = either (Left & f) Right
+  let right f = either Left (Right & f)
+
+instance profunctor (forget 'r)
+  let dimap f _ (Forget g) = Forget (g & f)
+
+instance monoid 'r => choice (forget 'r)
+  let left (Forget z) = Forget (either z (const zero))
+  let right (Forget z) = Forget (either (const zero) z)
+
+instance strong (forget 'r)
+  let first (Forget z) = Forget (z & fst)
+  let second (Forget z) = Forget (z & snd)
+
+let lens get set =
+  dimap (fork get id) (uncurry set) & first
+
+let view l = remember (l (Forget id))
+let over f = f
+let set l b = over l (const b)
+
+type pair 'a 'b = Pair of 'a * 'b
+let fst' (Pair (x, _)) = x
+let snd' (Pair (_, x)) = x
+
+let first' x = lens fst' (fun x (Pair (_, y)) -> Pair (x, y)) x
+let second' x = lens snd' (fun y (Pair (x, _)) -> Pair (x, y)) x
+
+type proxy 'a = Proxy
+
+type lens 's 't 'a 'b <- forall 'p. strong 'p => 'p 'a 'b -> 'p 's 't
+type lens' 's 'a <- lens 's 's 'a 'a
+
+class Amc.row_cons 'record 'key 'type 'new => has_lens 'record 'key 'type 'new | 'key 'new -> 'record 'type begin
+  val rlens : forall 'p. strong 'p => proxy 'key -> 'p 'type 'type -> 'p 'new 'new
+end
+
+instance Amc.known_string 'key * Amc.row_cons 'record 'key 'type 'new => has_lens 'record 'key 'type 'new begin
+  let rlens _ =
+    let view r =
+      let (x, _) = Amc.restrict_row @'key r
+      x
+    let set x r =
+      let (_, r') = Amc.restrict_row @'key r
+      Amc.extend_row @'key x r'
+    lens view set
+end
+
+let r : forall 'key -> forall 'record 'type 'new 'p. Amc.known_string 'key * has_lens 'record 'key 'type 'new * strong 'p => 'p 'type 'type -> 'p 'new 'new =
+  fun x -> rlens @'record (Proxy : proxy 'key) x
+
+let x :: xs = Cons (x, xs)
+let lens_list () = (fun x -> r @"foo" x) :: (fun x -> r @"bar" x) :: Nil @(lens' _ _)
+
+let map f xs = [ f x | with x <- xs ]
+
+let x = { foo = 1, bar = 2 }
+let xs = map (`view` x) (lens_list ())
+
+ + + diff --git a/static/profunctors.ml.html b/static/profunctors.ml.html new file mode 100644 index 0000000..1160e19 --- /dev/null +++ b/static/profunctors.ml.html @@ -0,0 +1,224 @@ + + + + +~/Projects/amulet/profunctors.ml.html + + + + + + + + + + + +
0
+
+
+
+external val print : 'a -> unit = "print"
+
+let id x = x
+let f <<< g = fun x -> f (g x)
+let const x _ = x
+let x |> f = f x
+
+let fst (x, _) = x
+let snd (_, x) = x
+let uncurry f (x, y) = f x y
+
+let fork f g x = (f x, g x)
+
+class profunctor 'p begin
+  val dimap : forall 'a 'b 'c 'd. ('b -> 'a) -> ('c -> 'd)
+              -> 'p 'a 'c -> 'p 'b 'd
+end
+
+let lmap g = dimap g id
+let rmap x = dimap id x
+
+class profunctor 'p => strong 'p begin
+  val first : forall 'a 'b 'c. 'p 'a 'b -> 'p ('a * 'c) ('b * 'c)
+  val second : forall 'a 'b 'c. 'p 'a 'b -> 'p ('c * 'a) ('c * 'b)
+end
+
+type either 'l 'r = Left of 'l | Right of 'r
+
+let either f g = function
+  | Left x -> f x
+  | Right y -> g y
+
+class profunctor 'p => choice 'p begin
+  val left : forall 'a 'b 'c. 'p 'a 'b
+              -> 'p (either 'a 'c) (either 'b 'c)
+  val right : forall 'a 'b 'c. 'p 'a 'b
+              -> 'p (either 'c 'a) (either 'c 'b)
+end
+
+class monoid 'm begin
+  val (<>) : 'm -> 'm -> 'm
+  val zero : 'm
+end
+
+type forget 'r 'a 'b = Forget of 'a -> 'r
+let remember (Forget r) = r
+
+instance profunctor (->)
+  let dimap f g h = g <<< h <<< f
+
+instance strong (->)
+  let first f (x, y) = (f x, y)
+  let second f (x, y) = (x, f y)
+
+instance choice (->)
+  let left f = either (Left <<< f) Right
+  let right f = either Left (Right <<< f)
+
+instance profunctor (forget 'r)
+  let dimap f _ (Forget g) = Forget (g <<< f)
+
+instance monoid 'r => choice (forget 'r)
+  let left (Forget z) = Forget (either z (const zero))
+  let right (Forget z) = Forget (either (const zero) z)
+
+instance strong (forget 'r)
+  let first (Forget z) = Forget (z <<< fst)
+  let second (Forget z) = Forget (z <<< snd)
+
+let lens get set =
+  dimap (fork get id) (uncurry set) <<< first
+
+let view l = remember (l (Forget id))
+let over f = f
+let set l b = over l (const b)
+
+let x ^. l = view l x
+let l ^~ f = over l f
+
+type pair 'a 'b = Pair of 'a * 'b
+let fst' (Pair (x, _)) = x
+let snd' (Pair (_, x)) = x
+
+let first' x = lens fst' (fun x (Pair (_, y)) -> Pair (x, y)) x
+let second' x = lens snd' (fun y (Pair (x, _)) -> Pair (x, y)) x
+
+type proxy 'a = Proxy
+
+type optic 'p 'a 's <- 'p 'a 'a -> 'p 's 's
+
+class
+     Amc.row_cons 'r 'k 't 'n
+  => has_lens 'r 'k 't 'n
+  | 'k 'n -> 'r 't
+begin
+  val rlens : strong 'p => proxy 'k -> optic 'p 't 'n
+end
+
+instance
+    Amc.known_string 'key
+  * Amc.row_cons 'record 'key 'type 'new
+  => has_lens 'record 'key 'type 'new
+begin
+  let rlens _ =
+    let view r =
+      let (x, _) = Amc.restrict_row @'key r
+      x
+    let set x r =
+      let (_, r') = Amc.restrict_row @'key r
+      Amc.extend_row @'key x r'
+    lens view set
+end
+
+let r
+  : forall 'key -> forall 'record 'type 'new 'p.
+     Amc.known_string 'key
+   * has_lens 'record 'key 'type 'new
+   * strong 'p
+  => optic 'p 'type 'new =
+  fun x -> rlens @'record (Proxy : proxy 'key) x
+
+let succ = (+ 1)
+
+ + + diff --git a/static/svg/amulet.svg b/static/svg/amulet.svg new file mode 100644 index 0000000..90b5543 --- /dev/null +++ b/static/svg/amulet.svg @@ -0,0 +1,10 @@ + + + + + + + + + + diff --git a/static/svg/gitea.svg b/static/svg/gitea.svg new file mode 100644 index 0000000..68594db --- /dev/null +++ b/static/svg/gitea.svg @@ -0,0 +1,31 @@ + + + + + + + + + + + + diff --git a/static/svg/github.svg b/static/svg/github.svg new file mode 100644 index 0000000..33a0d49 --- /dev/null +++ b/static/svg/github.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/static/svg/kofi.svg b/static/svg/kofi.svg new file mode 100644 index 0000000..8d9a658 --- /dev/null +++ b/static/svg/kofi.svg @@ -0,0 +1,46 @@ + + + + + + + + + diff --git a/static/svg/mastodon.svg b/static/svg/mastodon.svg new file mode 100644 index 0000000..cc1d323 --- /dev/null +++ b/static/svg/mastodon.svg @@ -0,0 +1,50 @@ + + + + + + + + diff --git a/static/svg/twitter.svg b/static/svg/twitter.svg new file mode 100644 index 0000000..c8342b6 --- /dev/null +++ b/static/svg/twitter.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file diff --git a/static/tasks.lisp b/static/tasks.lisp new file mode 100644 index 0000000..58c030f --- /dev/null +++ b/static/tasks.lisp @@ -0,0 +1,26 @@ +(import urn/control/prompt ()) + +(defun run-tasks (&tasks) ; 1 + (loop [(queue tasks)] ; 2 + [(empty? queue)] ; 2 + (call/p 'task (car queue) + (lambda (k) + (when (alive? k) + (push-cdr! queue k)))) ; 2 + (recur (cdr queue)))) + +(defun yield () + (abort-to-prompt 'task)) + +(run-tasks + (lambda () + (map (lambda (x) + (print! $"loop 1: ~{x}") + (yield)) + (range :from 1 :to 5))) + (lambda () + (map (lambda (x) + (print! $"loop 2: ~{x}") + (yield)) + (range :from 1 :to 5)))) + diff --git a/static/tasks.lisp.html b/static/tasks.lisp.html new file mode 100644 index 0000000..7f7b1e8 --- /dev/null +++ b/static/tasks.lisp.html @@ -0,0 +1,126 @@ + + + + +~/Projects/blag/static/tasks.lisp.html + + + + + + + + + + + +
0
+
+
+
+(import urn/control/prompt ())
+
+(defun run-tasks (&tasks) ; 1
+  (loop [(queue tasks)] ; 2
+    [(empty? queue)]    ; 2
+    (call/p 'task (car queue)
+      (lambda (k)
+        (when (alive? k)
+          (push-cdr! queue k)))) ; 2
+    (recur (cdr queue))))
+
+(defun yield ()
+  (abort-to-prompt 'task))
+
+(run-tasks
+  (lambda ()
+    (map (lambda (x)
+           (print! $"loop 1: ~{x}")
+           (yield))
+         (range :from 1 :to 5)))
+  (lambda ()
+    (map (lambda (x)
+           (print! $"loop 2: ~{x}")
+           (yield))
+         (range :from 1 :to 5))))
+
+
+ + + diff --git a/static/ttf/iosevk-abbie-bold.ttf b/static/ttf/iosevk-abbie-bold.ttf new file mode 100644 index 0000000..85db838 Binary files /dev/null and b/static/ttf/iosevk-abbie-bold.ttf differ diff --git a/static/ttf/iosevk-abbie-bolditalic.ttf b/static/ttf/iosevk-abbie-bolditalic.ttf new file mode 100644 index 0000000..12b31ef Binary files /dev/null and b/static/ttf/iosevk-abbie-bolditalic.ttf differ diff --git a/static/ttf/iosevk-abbie-boldoblique.ttf b/static/ttf/iosevk-abbie-boldoblique.ttf new file mode 100644 index 0000000..d9ce3e7 Binary files /dev/null and b/static/ttf/iosevk-abbie-boldoblique.ttf differ diff --git a/static/ttf/iosevk-abbie-extended.ttf b/static/ttf/iosevk-abbie-extended.ttf new file mode 100644 index 0000000..d687f8b Binary files /dev/null and b/static/ttf/iosevk-abbie-extended.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedbold.ttf b/static/ttf/iosevk-abbie-extendedbold.ttf new file mode 100644 index 0000000..49a9e10 Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedbold.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedbolditalic.ttf b/static/ttf/iosevk-abbie-extendedbolditalic.ttf new file mode 100644 index 0000000..f4d057d Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedbolditalic.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedboldoblique.ttf b/static/ttf/iosevk-abbie-extendedboldoblique.ttf new file mode 100644 index 0000000..e912ab0 Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedboldoblique.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedextrabold.ttf b/static/ttf/iosevk-abbie-extendedextrabold.ttf new file mode 100644 index 0000000..4dfe62f Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedextrabold.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedextrabolditalic.ttf b/static/ttf/iosevk-abbie-extendedextrabolditalic.ttf new file mode 100644 index 0000000..49ed186 Binary files /dev/null and 
b/static/ttf/iosevk-abbie-extendedextrabolditalic.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedextraboldoblique.ttf b/static/ttf/iosevk-abbie-extendedextraboldoblique.ttf new file mode 100644 index 0000000..acc1ca0 Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedextraboldoblique.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedheavy.ttf b/static/ttf/iosevk-abbie-extendedheavy.ttf new file mode 100644 index 0000000..77ba995 Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedheavy.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedheavyitalic.ttf b/static/ttf/iosevk-abbie-extendedheavyitalic.ttf new file mode 100644 index 0000000..5c92346 Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedheavyitalic.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedheavyoblique.ttf b/static/ttf/iosevk-abbie-extendedheavyoblique.ttf new file mode 100644 index 0000000..03dfdc2 Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedheavyoblique.ttf differ diff --git a/static/ttf/iosevk-abbie-extendeditalic.ttf b/static/ttf/iosevk-abbie-extendeditalic.ttf new file mode 100644 index 0000000..0094a3b Binary files /dev/null and b/static/ttf/iosevk-abbie-extendeditalic.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedmedium.ttf b/static/ttf/iosevk-abbie-extendedmedium.ttf new file mode 100644 index 0000000..764d0e1 Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedmedium.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedmediumitalic.ttf b/static/ttf/iosevk-abbie-extendedmediumitalic.ttf new file mode 100644 index 0000000..52e40a7 Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedmediumitalic.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedmediumoblique.ttf b/static/ttf/iosevk-abbie-extendedmediumoblique.ttf new file mode 100644 index 0000000..068bac4 Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedmediumoblique.ttf differ diff --git 
a/static/ttf/iosevk-abbie-extendedoblique.ttf b/static/ttf/iosevk-abbie-extendedoblique.ttf new file mode 100644 index 0000000..1ccd63e Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedoblique.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedsemibold.ttf b/static/ttf/iosevk-abbie-extendedsemibold.ttf new file mode 100644 index 0000000..1e5c5ed Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedsemibold.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedsemibolditalic.ttf b/static/ttf/iosevk-abbie-extendedsemibolditalic.ttf new file mode 100644 index 0000000..efe8106 Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedsemibolditalic.ttf differ diff --git a/static/ttf/iosevk-abbie-extendedsemiboldoblique.ttf b/static/ttf/iosevk-abbie-extendedsemiboldoblique.ttf new file mode 100644 index 0000000..ed7b9ee Binary files /dev/null and b/static/ttf/iosevk-abbie-extendedsemiboldoblique.ttf differ diff --git a/static/ttf/iosevk-abbie-extrabold.ttf b/static/ttf/iosevk-abbie-extrabold.ttf new file mode 100644 index 0000000..4820ea6 Binary files /dev/null and b/static/ttf/iosevk-abbie-extrabold.ttf differ diff --git a/static/ttf/iosevk-abbie-extrabolditalic.ttf b/static/ttf/iosevk-abbie-extrabolditalic.ttf new file mode 100644 index 0000000..66da859 Binary files /dev/null and b/static/ttf/iosevk-abbie-extrabolditalic.ttf differ diff --git a/static/ttf/iosevk-abbie-extraboldoblique.ttf b/static/ttf/iosevk-abbie-extraboldoblique.ttf new file mode 100644 index 0000000..84bbda2 Binary files /dev/null and b/static/ttf/iosevk-abbie-extraboldoblique.ttf differ diff --git a/static/ttf/iosevk-abbie-heavy.ttf b/static/ttf/iosevk-abbie-heavy.ttf new file mode 100644 index 0000000..8eaaf4e Binary files /dev/null and b/static/ttf/iosevk-abbie-heavy.ttf differ diff --git a/static/ttf/iosevk-abbie-heavyitalic.ttf b/static/ttf/iosevk-abbie-heavyitalic.ttf new file mode 100644 index 0000000..554119c Binary files /dev/null and 
b/static/ttf/iosevk-abbie-heavyitalic.ttf differ diff --git a/static/ttf/iosevk-abbie-heavyoblique.ttf b/static/ttf/iosevk-abbie-heavyoblique.ttf new file mode 100644 index 0000000..d99c6e6 Binary files /dev/null and b/static/ttf/iosevk-abbie-heavyoblique.ttf differ diff --git a/static/ttf/iosevk-abbie-italic.ttf b/static/ttf/iosevk-abbie-italic.ttf new file mode 100644 index 0000000..417a8d8 Binary files /dev/null and b/static/ttf/iosevk-abbie-italic.ttf differ diff --git a/static/ttf/iosevk-abbie-medium.ttf b/static/ttf/iosevk-abbie-medium.ttf new file mode 100644 index 0000000..c22be8d Binary files /dev/null and b/static/ttf/iosevk-abbie-medium.ttf differ diff --git a/static/ttf/iosevk-abbie-mediumitalic.ttf b/static/ttf/iosevk-abbie-mediumitalic.ttf new file mode 100644 index 0000000..29e85ea Binary files /dev/null and b/static/ttf/iosevk-abbie-mediumitalic.ttf differ diff --git a/static/ttf/iosevk-abbie-mediumoblique.ttf b/static/ttf/iosevk-abbie-mediumoblique.ttf new file mode 100644 index 0000000..b3f8ccd Binary files /dev/null and b/static/ttf/iosevk-abbie-mediumoblique.ttf differ diff --git a/static/ttf/iosevk-abbie-oblique.ttf b/static/ttf/iosevk-abbie-oblique.ttf new file mode 100644 index 0000000..6e019e9 Binary files /dev/null and b/static/ttf/iosevk-abbie-oblique.ttf differ diff --git a/static/ttf/iosevk-abbie-regular.ttf b/static/ttf/iosevk-abbie-regular.ttf new file mode 100644 index 0000000..d60fa9b Binary files /dev/null and b/static/ttf/iosevk-abbie-regular.ttf differ diff --git a/static/ttf/iosevk-abbie-semibold.ttf b/static/ttf/iosevk-abbie-semibold.ttf new file mode 100644 index 0000000..9908e1d Binary files /dev/null and b/static/ttf/iosevk-abbie-semibold.ttf differ diff --git a/static/ttf/iosevk-abbie-semibolditalic.ttf b/static/ttf/iosevk-abbie-semibolditalic.ttf new file mode 100644 index 0000000..010157e Binary files /dev/null and b/static/ttf/iosevk-abbie-semibolditalic.ttf differ diff --git a/static/ttf/iosevk-abbie-semiboldoblique.ttf 
b/static/ttf/iosevk-abbie-semiboldoblique.ttf new file mode 100644 index 0000000..1baa57a Binary files /dev/null and b/static/ttf/iosevk-abbie-semiboldoblique.ttf differ diff --git a/static/verify_error.png b/static/verify_error.png new file mode 100644 index 0000000..d17826f Binary files /dev/null and b/static/verify_error.png differ diff --git a/static/verify_warn.png b/static/verify_warn.png new file mode 100644 index 0000000..069ef12 Binary files /dev/null and b/static/verify_warn.png differ diff --git a/static/woff2/iosevk-abbie-bold.woff2 b/static/woff2/iosevk-abbie-bold.woff2 new file mode 100644 index 0000000..c5a800d Binary files /dev/null and b/static/woff2/iosevk-abbie-bold.woff2 differ diff --git a/static/woff2/iosevk-abbie-bolditalic.woff2 b/static/woff2/iosevk-abbie-bolditalic.woff2 new file mode 100644 index 0000000..b80dc39 Binary files /dev/null and b/static/woff2/iosevk-abbie-bolditalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-boldoblique.woff2 b/static/woff2/iosevk-abbie-boldoblique.woff2 new file mode 100644 index 0000000..46569b2 Binary files /dev/null and b/static/woff2/iosevk-abbie-boldoblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-extended.woff2 b/static/woff2/iosevk-abbie-extended.woff2 new file mode 100644 index 0000000..6916d87 Binary files /dev/null and b/static/woff2/iosevk-abbie-extended.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedbold.woff2 b/static/woff2/iosevk-abbie-extendedbold.woff2 new file mode 100644 index 0000000..bf8badc Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedbold.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedbolditalic.woff2 b/static/woff2/iosevk-abbie-extendedbolditalic.woff2 new file mode 100644 index 0000000..20805dc Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedbolditalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedboldoblique.woff2 b/static/woff2/iosevk-abbie-extendedboldoblique.woff2 new file mode 100644 index 
0000000..c834e03 Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedboldoblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedextrabold.woff2 b/static/woff2/iosevk-abbie-extendedextrabold.woff2 new file mode 100644 index 0000000..8b1941c Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedextrabold.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedextrabolditalic.woff2 b/static/woff2/iosevk-abbie-extendedextrabolditalic.woff2 new file mode 100644 index 0000000..057ff1d Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedextrabolditalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedextraboldoblique.woff2 b/static/woff2/iosevk-abbie-extendedextraboldoblique.woff2 new file mode 100644 index 0000000..07fed79 Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedextraboldoblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedheavy.woff2 b/static/woff2/iosevk-abbie-extendedheavy.woff2 new file mode 100644 index 0000000..42c5223 Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedheavy.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedheavyitalic.woff2 b/static/woff2/iosevk-abbie-extendedheavyitalic.woff2 new file mode 100644 index 0000000..b2f0abc Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedheavyitalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedheavyoblique.woff2 b/static/woff2/iosevk-abbie-extendedheavyoblique.woff2 new file mode 100644 index 0000000..b082e48 Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedheavyoblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendeditalic.woff2 b/static/woff2/iosevk-abbie-extendeditalic.woff2 new file mode 100644 index 0000000..5dd5dc6 Binary files /dev/null and b/static/woff2/iosevk-abbie-extendeditalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedmedium.woff2 b/static/woff2/iosevk-abbie-extendedmedium.woff2 new file mode 100644 index 
0000000..443d15c Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedmedium.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedmediumitalic.woff2 b/static/woff2/iosevk-abbie-extendedmediumitalic.woff2 new file mode 100644 index 0000000..9312713 Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedmediumitalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedmediumoblique.woff2 b/static/woff2/iosevk-abbie-extendedmediumoblique.woff2 new file mode 100644 index 0000000..30a3843 Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedmediumoblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedoblique.woff2 b/static/woff2/iosevk-abbie-extendedoblique.woff2 new file mode 100644 index 0000000..4557c41 Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedoblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedsemibold.woff2 b/static/woff2/iosevk-abbie-extendedsemibold.woff2 new file mode 100644 index 0000000..cd41ee0 Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedsemibold.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedsemibolditalic.woff2 b/static/woff2/iosevk-abbie-extendedsemibolditalic.woff2 new file mode 100644 index 0000000..2a3228e Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedsemibolditalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-extendedsemiboldoblique.woff2 b/static/woff2/iosevk-abbie-extendedsemiboldoblique.woff2 new file mode 100644 index 0000000..c41498d Binary files /dev/null and b/static/woff2/iosevk-abbie-extendedsemiboldoblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-extrabold.woff2 b/static/woff2/iosevk-abbie-extrabold.woff2 new file mode 100644 index 0000000..5d21d6a Binary files /dev/null and b/static/woff2/iosevk-abbie-extrabold.woff2 differ diff --git a/static/woff2/iosevk-abbie-extrabolditalic.woff2 b/static/woff2/iosevk-abbie-extrabolditalic.woff2 new file mode 100644 index 0000000..7be709b Binary 
files /dev/null and b/static/woff2/iosevk-abbie-extrabolditalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-extraboldoblique.woff2 b/static/woff2/iosevk-abbie-extraboldoblique.woff2 new file mode 100644 index 0000000..0a2b1e9 Binary files /dev/null and b/static/woff2/iosevk-abbie-extraboldoblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-heavy.woff2 b/static/woff2/iosevk-abbie-heavy.woff2 new file mode 100644 index 0000000..2aa1de6 Binary files /dev/null and b/static/woff2/iosevk-abbie-heavy.woff2 differ diff --git a/static/woff2/iosevk-abbie-heavyitalic.woff2 b/static/woff2/iosevk-abbie-heavyitalic.woff2 new file mode 100644 index 0000000..d348b19 Binary files /dev/null and b/static/woff2/iosevk-abbie-heavyitalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-heavyoblique.woff2 b/static/woff2/iosevk-abbie-heavyoblique.woff2 new file mode 100644 index 0000000..7bdd72e Binary files /dev/null and b/static/woff2/iosevk-abbie-heavyoblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-italic.woff2 b/static/woff2/iosevk-abbie-italic.woff2 new file mode 100644 index 0000000..30ebec0 Binary files /dev/null and b/static/woff2/iosevk-abbie-italic.woff2 differ diff --git a/static/woff2/iosevk-abbie-medium.woff2 b/static/woff2/iosevk-abbie-medium.woff2 new file mode 100644 index 0000000..42354f4 Binary files /dev/null and b/static/woff2/iosevk-abbie-medium.woff2 differ diff --git a/static/woff2/iosevk-abbie-mediumitalic.woff2 b/static/woff2/iosevk-abbie-mediumitalic.woff2 new file mode 100644 index 0000000..68bed1a Binary files /dev/null and b/static/woff2/iosevk-abbie-mediumitalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-mediumoblique.woff2 b/static/woff2/iosevk-abbie-mediumoblique.woff2 new file mode 100644 index 0000000..699eb5d Binary files /dev/null and b/static/woff2/iosevk-abbie-mediumoblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-oblique.woff2 b/static/woff2/iosevk-abbie-oblique.woff2 new file mode 100644 index 
0000000..8903d9e Binary files /dev/null and b/static/woff2/iosevk-abbie-oblique.woff2 differ diff --git a/static/woff2/iosevk-abbie-regular.woff2 b/static/woff2/iosevk-abbie-regular.woff2 new file mode 100644 index 0000000..89e3c8f Binary files /dev/null and b/static/woff2/iosevk-abbie-regular.woff2 differ diff --git a/static/woff2/iosevk-abbie-semibold.woff2 b/static/woff2/iosevk-abbie-semibold.woff2 new file mode 100644 index 0000000..f0d8b5f Binary files /dev/null and b/static/woff2/iosevk-abbie-semibold.woff2 differ diff --git a/static/woff2/iosevk-abbie-semibolditalic.woff2 b/static/woff2/iosevk-abbie-semibolditalic.woff2 new file mode 100644 index 0000000..53dc703 Binary files /dev/null and b/static/woff2/iosevk-abbie-semibolditalic.woff2 differ diff --git a/static/woff2/iosevk-abbie-semiboldoblique.woff2 b/static/woff2/iosevk-abbie-semiboldoblique.woff2 new file mode 100644 index 0000000..0988841 Binary files /dev/null and b/static/woff2/iosevk-abbie-semiboldoblique.woff2 differ diff --git a/sync b/sync new file mode 100755 index 0000000..b4d77a9 --- /dev/null +++ b/sync @@ -0,0 +1,3 @@ +#!/usr/bin/env bash + +rsync .site/ "shamiko:/var/www/amelia.how" -avx \ No newline at end of file diff --git a/syntax/amcprove.xml b/syntax/amcprove.xml new file mode 100644 index 0000000..dc8e032 --- /dev/null +++ b/syntax/amcprove.xml @@ -0,0 +1,190 @@ + + + + + + + + + + + + +?@^|-~][:!#$%&*+\\/<=>?@^|-~\.]*"> +]> + + + + + forall + fun + function + lazy + match + not + yes + probably + + + + tt + ff + + + + + + + + + + Amc + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/syntax/amulet.xml b/syntax/amulet.xml new file mode 100644 index 0000000..a7a0c6a --- /dev/null +++ b/syntax/amulet.xml @@ -0,0 +1,222 @@ + + + + + + + + + + + + 
+?@^|-~][:!#$%&*+\\/<=>?@^|-~\.]*"> +]> + + + + + as + forall + begin + class + else + end + external + fun + function + if + in + lazy + let + match + module + of + open + then + type + val + with + instance + rec + import + and + + + + true + false + + + + + + + string + int + float + bool + unit + lazy + list + constraint + ref + + known_string + known_int + row_cons + + + + Amc + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/templates/archive.html b/templates/archive.html new file mode 100644 index 0000000..513637b --- /dev/null +++ b/templates/archive.html @@ -0,0 +1,6 @@ +
+
+ Here you can find all my previous posts: + $partial("templates/post-list.html")$ +
+
diff --git a/templates/default.html b/templates/default.html new file mode 100644 index 0000000..6c710cb --- /dev/null +++ b/templates/default.html @@ -0,0 +1,71 @@ + + + + + + + $title$ + + + + + + + + + + + + + + + + + + + + + + + + +
+ + +
+ +
+
+

$title$

+ $if(subtitle)$ +
+

$subtitle$

+
+ $endif$ +
+ $body$ +
+ + + + + + + + diff --git a/templates/page.html b/templates/page.html new file mode 100644 index 0000000..269da5d --- /dev/null +++ b/templates/page.html @@ -0,0 +1,5 @@ +
+
+ $body$ +
+
\ No newline at end of file diff --git a/templates/post-list.html b/templates/post-list.html new file mode 100644 index 0000000..f9c071f --- /dev/null +++ b/templates/post-list.html @@ -0,0 +1,15 @@ +
    + $for(posts)$ +
  • +
    +
    + $title$ + $date$ +
    +
    + $synopsys$ +
    +
    +
  • + $endfor$ +
\ No newline at end of file diff --git a/templates/post.html b/templates/post.html new file mode 100644 index 0000000..9ccef85 --- /dev/null +++ b/templates/post.html @@ -0,0 +1,15 @@ +
+ +
+
+ Posted on $date$
+ Word count: $words$
+
+ $body$ +
+
\ No newline at end of file diff --git a/templates/tikz.tex b/templates/tikz.tex new file mode 100644 index 0000000..dd28d2b --- /dev/null +++ b/templates/tikz.tex @@ -0,0 +1,39 @@ +\documentclass{article} +\usepackage[pdftex,active,tightpage]{preview} +\usepackage{amsmath} +\usepackage{amssymb} +\usepackage{tikz} +\usetikzlibrary{matrix} +\usetikzlibrary{calc} +\usetikzlibrary{fit} +\usetikzlibrary{arrows} +\usetikzlibrary{arrows.meta} +\usetikzlibrary{decorations.pathreplacing} +\usepackage{xcolor} +\definecolor{red}{RGB}{235, 77, 75} +\definecolor{blue}{RGB}{9, 123, 227} + +\tikzset{ + ncbar angle/.initial=90, + ncbar/.style={ + to path=(\tikztostart) + -- ($$(\tikztostart)!#1!\pgfkeysvalueof{/tikz/ncbar angle}:(\tikztotarget)$$) + -- ($$(\tikztotarget)!($$(\tikztostart)!#1!\pgfkeysvalueof{/tikz/ncbar angle}:(\tikztotarget)$$)!\pgfkeysvalueof{/tikz/ncbar angle}:(\tikztostart)$$) + -- (\tikztotarget) + }, + ncbar/.default=0.5cm, +} + +\tikzset{square left brace/.style={ncbar=0.5cm}} +\tikzset{square right brace/.style={ncbar=-0.5cm}} + +\tikzset{round left paren/.style={ncbar=0.5cm,out=120,in=-120}} +\tikzset{round right paren/.style={ncbar=0.5cm,out=60,in=-60}} + +\begin{document} +\begin{preview} + \begin{tikzpicture}[auto] + $body$ + \end{tikzpicture} +\end{preview} +\end{document}