Compare commits

No commits in common. "master" and "colors" have entirely different histories.

542 changed files with 20404 additions and 21403 deletions

.gitmodules

@@ -1,18 +0,0 @@
[submodule "code/aoc-2020"]
path = code/aoc-2020
url = https://dev.danilafe.com/Advent-of-Code/AdventOfCode-2020.git
[submodule "themes/vanilla"]
path = themes/vanilla
url = https://dev.danilafe.com/Web-Projects/vanilla-hugo.git
[submodule "code/server-config"]
path = code/server-config
url = https://dev.danilafe.com/Nix-Configs/server-config
[submodule "code/blog-static-flake"]
path = code/blog-static-flake
url = https://dev.danilafe.com/Nix-Configs/blog-static-flake.git
[submodule "code/compiler"]
path = code/compiler
url = https://dev.danilafe.com/DanilaFe/bloglang.git
[submodule "code/agda-spa"]
path = code/agda-spa
url = https://dev.danilafe.com/DanilaFe/agda-spa.git

Gemfile

@@ -1,9 +0,0 @@
# frozen_string_literal: true
source "https://rubygems.org"
git_source(:github) {|repo_name| "https://github.com/#{repo_name}" }
gem 'nokogiri'
gem 'execjs'
gem 'duktape'

Gemfile.lock

@@ -1,21 +0,0 @@
GEM
remote: https://rubygems.org/
specs:
duktape (2.7.0.0)
execjs (2.9.1)
mini_portile2 (2.8.6)
nokogiri (1.15.6)
mini_portile2 (~> 2.8.2)
racc (~> 1.4)
racc (1.8.0)
PLATFORMS
ruby
DEPENDENCIES
duktape
execjs
nokogiri
BUNDLED WITH
2.1.4

agda.rb

@@ -1,351 +0,0 @@
require "nokogiri"
require "pathname"
files = ARGV[0..-1]
class LineInfo
attr_accessor :links
def initialize
@links = []
end
end
class AgdaContext
def initialize
@file_infos = {}
end
# Traverse the preformatted Agda block in the given Agda HTML file
# and find which textual ranges have IDs and links to other ranges.
# Store this information in a hash, line => LineInfo.
def process_agda_html_file(file)
return @file_infos[file] if @file_infos.include? file
@file_infos[file] = line_infos = {}
unless File.exist?(file)
return line_infos
end
document = Nokogiri::HTML.parse(File.open(file))
pre_code = document.css("pre.Agda")[0]
# The traversal is post-order: we always visit children before their
# parents, and we visit leaves in sequence.
offset = 0
line = 1
pre_code.traverse do |at|
# Text nodes are always leaves; visiting a new leaf means we've advanced
# in the text by the length of that text. However, if there are newlines
# -- since this is a preformatted block -- we also advance by a line.
# At this time, we do not support links that span multiple lines, but
# Agda doesn't produce those either.
if at.text?
if at.content.include? "\n"
# This textual leaf is at least partly whitespace. The logic
# assumes that links can't span multiple lines, and that links
# aren't nested, so ensure that the parent of the textual node
# is the preformatted block itself.
raise "unsupported Agda HTML output" if at.parent.name != "pre"
# Increase the line and track the final offset. Written as a loop
# in case we eventually want to add some handling for the pieces
# sandwiched between newlines.
at.content.split("\n", -1).each_with_index do |bit, idx|
line += 1 unless idx == 0
offset = bit.length
end
else
# It's not a newline node, so it could be anywhere. All we need to
# do is adjust the offset within the full pre block's text.
offset += at.content.length
end
elsif at.name == "a"
# We found a link. Agda emits both links and "things to link to" as
# 'a' nodes, so check for either, and record them. Even if
# the link is nested, the .content.length accessor will only
# retrieve the textual content, and thus -- assuming the link
# isn't split across lines -- will find the proper from-to range.
line_info = line_infos.fetch(line) { line_infos[line] = LineInfo.new }
href = at.attribute("href")
id = at.attribute("id")
if href or id
new_node = { :from => offset-at.content.length, :to => offset }
new_node[:href] = href if href
new_node[:id] = id if id
line_info.links << new_node
end
end
end
return line_infos
end
end
class FileGroup
def initialize(agda_context)
@agda_context = agda_context
# Agda HTML href -> list of (file, Hugo HTML node that links to it)
@nodes_referencing_href = {}
# Agda HTML href -> (its new ID in Hugo-land, file in which it's defined)
# This supports cross-post linking within a series.
@global_seen_hrefs = {}
# file name -> Agda HTML href -> its new ID in Hugo-land
# This supports linking within a particular post.
@local_seen_hrefs = Hash.new { {} }
# Global counter to generate fresh IDs. There's no reason for it to
# be global within a series (since IDs are namespaced by the file they're in),
# but it's just more convenient this way.
@id_counter = 0
end
def note_defined_href(file, href)
file_hrefs = @local_seen_hrefs.fetch(file) do
@local_seen_hrefs[file] = {}
end
uniq_id = file_hrefs.fetch(href) do
new_id = "agda-unique-ident-#{@id_counter}"
@id_counter += 1
file_hrefs[href] = new_id
end
unless @global_seen_hrefs.include? href
@global_seen_hrefs[href] = { :file => file, :id => uniq_id }
end
return uniq_id
end
def note_used_href(file, node, href)
ref_list = @nodes_referencing_href.fetch(href) { @nodes_referencing_href[href] = [] }
ref_list << { :file => file, :node => node }
return href
end
# Given a Hugo HTML file which references potentially several Agda modules
# in code blocks, insert links into the code blocks.
#
# There are several things we need to take care of:
# 1. Finding the HTML files associated with each referenced Agda module.
# For this, we make use of the data-base-path etc. attributes that
# the vanilla theme inserts.
# 2. "zipping together" the Agda and Hugo HTML representations. Each of
# them encodes the code, but they use different HTML elements and structures.
# So, given a Hugo HTML code block, traverse its textual contents
# and find any that are covered by links in the related Agda HTML file.
# 3. Fixing up links: the Agda HTML links assume each module has its own HTML
# file. This isn't true for us: multiple modules are stitched into
# one Hugo HTML file. Also, we don't include the entire Agda HTML
# file in the Hugo HTML, so some links may be broken. So, find IDs
# that are visible in the Hugo file, rename them to be globally unique,
# and re-write cross-file links that reference these IDs to point
# inside the Hugo file.
def process_source_file(file, document)
# Process each highlight group that's been marked as an Agda file.
document.css('div[data-agda-block]').each do |t|
first_line, last_line = nil, nil
if first_line_attr = t.attribute("data-first-line")
first_line = first_line_attr.to_s.to_i
end
if last_line_attr = t.attribute("data-last-line")
last_line = last_line_attr.to_s.to_i
end
if first_line and last_line
line_range = first_line..last_line
else
line_range = 1..
end
# Sometimes, code is deeply nested in the source file, but we don't
# want to show the leading space. In that case, the generator sets
# data-source-offset with how much leading space was stripped off.
initial_offset = 0
if source_offset_attr = t.attribute("data-source-offset")
initial_offset = source_offset_attr.to_s.to_i
end
full_path = t.attribute("data-file-path").to_s
full_path_dirs = Pathname(full_path).each_filename.to_a
# The name of an Agda module is determined by its directory
# structure: A/B/C.agda produces A.B.C.html. Depending on where
# the code is included, there might be some additional folders
# preceding A that we want to ignore. (A standalone sketch of this
# mapping follows this file.)
base_path = t.attribute("data-base-path").to_s
base_dir_depth = 0
if base_path.empty?
# No submodules were used. Assume code/<X> is the root, since
# that's the code layout of the blog right now.
base_dir_depth = 1
base_path = full_path_dirs[0]
else
# The code is in a submodule. Assume that the base path / submodule
# root is the Agda module root, ignore all folders before that.
base_path_dirs = Pathname(base_path).each_filename.to_a
base_dir_depth = base_path_dirs.length
end
dirs_in_base = full_path_dirs[base_dir_depth..-1]
html_file = dirs_in_base.join(".").gsub(/\.agda$/, ".html")
html_path = File.join(["code", base_path, "html", html_file])
agda_info = @agda_context.process_agda_html_file(html_path)
# Hugo conveniently generates a bunch of spans, each encoding a line
# of code output. We can iterate over these and match them up with
# the line numbers we got from reading the Agda HTML output.
lines = t.css("pre.chroma code[data-lang] .line")
lines.zip(line_range).each do |line, line_no|
line_info = agda_info[line_no]
next unless line_info
offset = initial_offset
line.traverse do |lt|
if lt.text?
content = lt.content
new_offset = offset + content.length
# The span/a/etc. structure of the Agda and Hugo HTML files
# need not line up; it's possible for there to be a single link
# in the Agda file that's broken up across multiple HTML nodes
# in the Hugo output. For now, just don't link those, since inserting
# such overlapping links is relatively complicated. Instead,
# require links to fit fully within the current text node (and thus,
# not overlap the boundaries of any HTML).
matching_links = line_info.links.filter do |link|
link[:from] >= offset and link[:to] <= new_offset
end
# A given text node can be broken into any number of sub-nodes,
# where some sub-nodes are still text, and others have been turned
# into links. Store the new pieces in replace_with. E.g.,
#
# Original:
# abc
#
# New:
# a<a href="..">b</a>c
#
# replace_with:
# ["a", <Nokogiri::XML::Node...>, "c"]
#
# replace_offset represents how much of the original text we've
# already converted. The iteration below assumes that matching
# links are in order and don't overlap.
replace_with = []
replace_offset = 0
matching_links.each do |match|
# The link's range is an offset from the beginning of the line,
# but the text piece we're splitting up might be partway into
# the line. Convert the link coordinates to piece-relative ones.
relative_from = match[:from] - offset
relative_to = match[:to] - offset
# If the previous link ended some time before the new link
# began (or if the current link is the first one, and is not
# at the beginning), ensure that the plain text "in between"
# is kept.
replace_with << content[replace_offset...relative_from]
tag = (match.include? :href) ? 'a' : 'span'
new_node = Nokogiri::XML::Node.new(tag, document)
if match.include? :href
# For nodes with links, note what they're referring to, so
# we can adjust their hrefs when we assign global IDs.
href = match[:href].to_s
new_node['href'] = note_used_href file, new_node, href
end
if match.include? :id
# For nodes with IDs visible in the current Hugo file, we'll
# want to redirect links that previously pointed to other Agda
# module HTML files. So, note the ID that we want to redirect,
# and pick a new unique ID to replace it with.
id = match[:id].to_s
new_node['id'] = note_defined_href file, "#{html_file}##{id}"
end
new_node.content = content[relative_from...relative_to]
replace_with << new_node
replace_offset = relative_to
end
replace_with << content[replace_offset..-1]
# Finally, replace the node under consideration with the new
# pieces.
replace_with.each do |replacement|
lt.add_previous_sibling replacement
end
lt.remove
offset = new_offset
end
end
end
end
end
def cross_link_files
# Now we have a complete list of all the IDs visible in scope.
# Redirect relevant links to these IDs: first within a post, then
# across posts in the same series.
@nodes_referencing_href.each do |href, references|
references.each do |reference|
file = reference[:file]
node = reference[:node]
local_targets = @local_seen_hrefs[file]
if local_targets.include? href
# A code block in this file provides this href; create a local link.
node['href'] = "##{local_targets[href]}"
elsif @global_seen_hrefs.include? href
# A code block in this series, but not in this file, defines
# this href. Create a cross-file link.
target = @global_seen_hrefs[href]
other_file = target[:file]
id = target[:id]
relpath = Pathname.new(other_file).dirname.relative_path_from(Pathname.new(file).dirname)
node['href'] = "#{relpath}##{id}"
else
# No definitions in any blog page. For now, just delete the anchor.
node.replace node.content
end
end
end
end
end
agda_context = AgdaContext.new
file_documents = {}
series_groups = files.group_by do |file|
file_documents[file] = document = Nokogiri::HTML.parse(File.open(file))
document.css("meta[name=blog-series]")&.attribute("content")&.to_s
end
# For the 'nil' group, process individually.
if files_with_no_series = series_groups.delete(nil)
files_with_no_series.each do |file|
file_group = FileGroup.new agda_context
file_group.process_source_file file, file_documents[file]
file_group.cross_link_files
end
end
# For groups, process them together to allow cross-linking
series_groups.each do |_, files_in_series|
file_group = FileGroup.new agda_context
files_in_series.each do |file|
file_group.process_source_file file, file_documents[file]
end
file_group.cross_link_files
end
# Having modified all the HTML files, save them.
file_documents.each do |file, document|
File.write(file, document.to_html(encoding: 'UTF-8'))
end
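
An editorial aside on the module-name mapping described above: the snippet below is a minimal, self-contained sketch of the same directory-to-HTML-name conversion that process_source_file performs. The helper name agda_html_name and the Language/Base.agda sample file are assumptions for illustration, not part of the deleted script.

require "pathname"

# Mirror dirs_in_base.join(".") above: with base path "code/agda-spa",
# the file "code/agda-spa/Language/Base.agda" maps to "Language.Base.html".
def agda_html_name(full_path, base_path)
  dirs = Pathname(full_path).each_filename.to_a
  depth = Pathname(base_path).each_filename.to_a.length
  dirs[depth..-1].join(".").gsub(/\.agda$/, ".html")
end

puts agda_html_name("code/agda-spa/Language/Base.agda", "code/agda-spa")
# => Language.Base.html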
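
The "abc" to a<a href="...">b</a>c example in the comments above can also be reproduced in isolation. This is a sketch under the assumption that a single link covers characters 1..2 of a one-line code span; the markup and the agda-unique-ident-0 ID are invented for illustration.

require "nokogiri"

doc = Nokogiri::HTML::Document.parse('<pre><code><span class="line">abc</span></code></pre>')
text = doc.at_css(".line").children.first  # the text node "abc"
content = text.content
from, to = 1, 2                            # pretend characters 1..2 are a link

link = Nokogiri::XML::Node.new("a", doc)
link["href"] = "#agda-unique-ident-0"
link.content = content[from...to]

# Insert the plain/link/plain pieces before the original text node, then
# drop it -- the same replace_with dance as in process_source_file above.
[content[0...from], link, content[to..-1]].each do |piece|
  text.add_previous_sibling piece
end
text.remove

puts doc.at_css(".line").to_html
# => <span class="line">a<a href="#agda-unique-ident-0">b</a>c</span>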

@@ -1,110 +0,0 @@
require "pathname"
require "set"
require "json"
def resolve_path(bp, p)
path = nil
if bp.start_with? "."
path = Pathname.new(File.join(bp, p)).cleanpath.to_s
elsif p.start_with? "blog/"
path = File.join("content", p)
else
path = File.join("content", "blog", p)
end
if File.directory? path
path = File.join(path, "index.md")
elsif !path.end_with? ".md"
path += ".md"
end
path.gsub("blog/blog/", "blog/")
end
files = Set.new
refs = {}
Dir['content/blog/**/*.md'].each do |file|
file = file.chomp
files << file
arr = refs[file] || (refs[file] = [])
pattern = Regexp.union(/< relref "([^"]+)" >/, /< draftlink "[^"]+" "([^"]+)" >/)
File.open(file).read.scan(pattern) do |ref|
ref = resolve_path(File.dirname(file), ref[0] || ref[1])
arr << ref
files << ref
end
arr.uniq!
end
data = {}
id = 0
series = {}
files.each do |file|
id += 1
name = file
tags = []
group = 1
draft = false
next unless File.exist?(file)
value = File.size(file)
url = file.gsub(/^content/, "https://danilafe.com").delete_suffix("/index.md").delete_suffix(".md")
File.readlines(file).each do |l|
if l =~ /^title: (.+)$/
name = $~[1].delete_prefix('"').delete_suffix('"')
elsif l =~ /^draft: true$/
draft = true
elsif l =~ /^series: (.+)$/
this_series = $~[1]
series_list = series.fetch(this_series) do
series[this_series] = []
end
series_list << file
elsif l =~ /^tags: (.+)$/
tags = $~[1].delete_prefix("[").delete_suffix("]").split(/,\s?/).map { |it| it.gsub('"', '') }
if tags.include? "Compilers"
group = 2
elsif tags.include? "Coq"
group = 3
elsif tags.include? "Programming Languages"
group = 4
elsif tags.include? "Haskell"
group = 5
elsif tags.include? "Crystal"
group = 6
elsif tags.include? "Agda"
group = 7
elsif tags.include? "Hugo"
group = 8
end
end
end
next if draft
data[file] = { :id => id, :name => name, :group => group, :tags => tags, :url => url, :value => value }
end
edges = []
files.each do |file1|
# files.each do |file2|
# next if file1 == file2
# next unless data[file1][:tags].any? { |t| data[file2][:tags].include? t }
# edges << { :from => data[file1][:id], :to => data[file2][:id] }
# end
next unless frefs = refs[file1]
frefs.each do |ref|
next unless data[file1]
next unless data[ref]
edges << { :from => data[file1][:id], :to => data[ref][:id] }
end
end
series.each do |series, files|
files.sort.each_cons(2) do |file1, file2|
next unless data[file1]
next unless data[file2]
edges << { :from => data[file1][:id], :to => data[file2][:id] }
edges << { :from => data[file2][:id], :to => data[file1][:id] }
end
end
edges.uniq!
# edges.filter! { |e| e[:from] < e[:to] }
edges.map! { |e| { :from => [e[:from], e[:to]].min, :to => [e[:from], e[:to]].max } }.uniq!
puts ("export const nodes = " + JSON.pretty_unparse(data.values) + ";")
puts ("export const edges = " + JSON.pretty_unparse(edges) + ";")

@@ -1,65 +0,0 @@
LatexListNil @ latexlist(nil, nil) <-;
LatexListCons @ latexlist(cons(?x, ?xs), cons(?l_x, ?l_s)) <- latex(?x, ?l_x), latexlist(?xs, ?l_s);
IntercalateNil @ intercalate(?sep, nil, nil) <-;
IntercalateConsCons @ intercalate(?sep, cons(?x_1, cons(?x_2, ?xs)), cons(?x_1, cons(?sep, ?ys))) <- intercalate(?sep, cons(?x_2, ?xs), ?ys);
IntercalateConsNil @ intercalate(?sep, cons(?x, nil), cons(?x, nil)) <-;
NonEmpty @ nonempty(cons(?x, ?xs)) <-;
InListHere @ inlist(?e, cons(?e, ?es)) <-;
InListThere @ inlist(?e_1, cons(?e_2, ?es)) <- inlist(?e_1, ?es);
BasicParenLit @ paren(lit(?v), ?l) <- latex(lit(?v), ?l);
BasicParenVar @ paren(var(?x), ?l) <- latex(var(?x), ?l);
BasicParenVar @ paren(metavariable(?x), ?l) <- latex(metavariable(?x), ?l);
BasicParenOther @ paren(?t, ?l) <- latex(?t, ?l_t), join(["(", ?l_t, ")"], ?l);
LatexInt @ latex(?i, ?l) <- int(?i), tostring(?i, ?l);
LatexFloat @ latex(?f, ?l) <- float(?f), tostring(?f, ?l);
LatexStr @ latex(?s, ?l) <- str(?s), escapestring(?s, ?l_1), latexifystring(?s, ?l_2), join(["\\texttt{\"", ?l_2, "\"}"], ?l);
LatexMeta @ latex(metavariable(?l), ?l) <-;
LatexLit @ latex(lit(?i), ?l) <- latex(?i, ?l);
LatexVar @ latex(var(metavariable(?s)), ?l) <- latex(metavariable(?s), ?l);
LatexVar @ latex(var(?s), ?l) <- latex(?s, ?l_v), join(["\\texttt{", ?l_v, "}"], ?l);
LatexPlus @ latex(plus(?e_1, ?e_2), ?l) <-
paren(?e_1, ?l_1), paren(?e_2, ?l_2),
join([?l_1, " + ", ?l_2], ?l);
LatexMinus @ latex(minus(?e_1, ?e_2), ?l) <-
paren(?e_1, ?l_1), paren(?e_2, ?l_2),
join([?l_1, " - ", ?l_2], ?l);
EnvLiteralNil @ envlitrec(empty, "\\{", "", ?seen) <-;
EnvLiteralSingle @ envlitsingle(?pre, ?e, ?v, "", ?pre, ?seen) <- inlist(?e, ?seen);
EnvLiteralSingle @ envlitsingle(?pre, ?e, ?v, ?l, ", ", ?seen) <- latex(?e, ?l_e), latex(?v, ?l_v), join([?pre, "\\texttt{", ?l_e, "} \\mapsto", ?l_v], ?l);
EnvLiteralCons @ envlitrec(extend(empty, ?e, ?v), ?l, ?newnext, ?seen) <- envlitrec(?rho, ?l_rho, ?next, cons(?e, ?seen)), envlitsingle(?next, ?e, ?v, ?l_v, ?newnext, ?seen), join([?l_rho, ?l_v], ?l);
EnvLiteralCons @ envlitrec(extend(?rho, ?e, ?v), ?l, ?newnext, ?seen) <- envlitrec(?rho, ?l_rho, ?next, cons(?e, ?seen)), envlitsingle(?next, ?e, ?v, ?l_v, ?newnext, ?seen), join([?l_rho, ?l_v], ?l);
EnvLiteralOuter @ envlit(?rho, ?l) <- envlitrec(?rho, ?l_rho, ?rest, []), join([?l_rho, "\\}"], ?l);
LatexEnvLit @ latex(?rho, ?l) <- envlit(?rho, ?l);
LatexTypeEmpty @ latex(empty, "\\{\\}") <-;
LatexExtend @ latex(extend(?a, ?b, ?c), ?l) <- latex(?a, ?l_a), latex(?b, ?l_b), latex(?c, ?l_c), join([?l_a, "[", ?l_b, " \\mapsto ", ?l_c, "]"], ?l);
LatexInenv @ latex(inenv(?x, ?v, ?G), ?l) <- latex(?x, ?l_x), latex(?v, ?l_v), latex(?G, ?l_G), join([?l_G, "(", ?l_x, ") = ", ?l_v], ?l);
LatexEvalTer @ latex(eval(?G, ?e, ?t), ?l) <- latex(?G, ?l_G), latex(?e, ?l_e), latex(?t, ?l_t), join([?l_G, ",\\ ", ?l_e, " \\Downarrow ", ?l_t], ?l);
LatexAdd @ latex(add(?a, ?b, ?c), ?l) <- latex(?a, ?l_a), latex(?b, ?l_b), latex(?c, ?l_c), join([?l_a, "+", ?l_b, "=", ?l_c], ?l);
LatexSubtract @ latex(subtract(?a, ?b, ?c), ?l) <- latex(?a, ?l_a), latex(?b, ?l_b), latex(?c, ?l_c), join([?l_a, "-", ?l_b, "=", ?l_c], ?l);
LatexEvalTer @ latex(stepbasic(?G, ?e, ?H), ?l) <- latex(?G, ?l_G), latex(?e, ?l_e), latex(?H, ?l_H), join([?l_G, ",\\ ", ?l_e, " \\Rightarrow ", ?l_H], ?l);
LatexEvalTer @ latex(step(?G, ?e, ?H), ?l) <- latex(?G, ?l_G), latex(?e, ?l_e), latex(?H, ?l_H), join([?l_G, ",\\ ", ?l_e, " \\Rightarrow ", ?l_H], ?l);
LatexNoop @ latex(noop, "\\texttt{noop}") <-;
LatexAssign @ latex(assign(?x, ?e), ?l) <- latex(?x, ?l_x), latex(?e, ?l_e), join([?l_x, " = ", ?l_e], ?l);
LatexAssign @ latex(if(?e, ?s_1, ?s_2), ?l) <- latex(?e, ?l_e), latex(?s_1, ?l_1), latex(?s_2, ?l_2), join(["\\textbf{if}\\ ", ?l_e, "\\ \\{\\ ", ?l_1, "\\ \\}\\ \\textbf{else}\\ \\{\\ ", ?l_2, "\\ \\}"], ?l);
LatexAssign @ latex(while(?e, ?s), ?l) <- latex(?e, ?l_e), latex(?s, ?l_s), join(["\\textbf{while}\\ ", ?l_e, "\\ \\{\\ ", ?l_s, "\\ \\}"], ?l);
LatexAssign @ latex(seq(?s_1, ?s_2), ?l) <- latex(?s_1, ?l_1), latex(?s_2, ?l_2), join([?l_1, "; ", ?l_2], ?l);
LatexNumNeq @ latex(not(eq(?e_1, ?e_2)), ?l) <- latex(?e_1, ?l_1), latex(?e_2, ?l_2), join([?l_1, " \\neq ", ?l_2], ?l);
LatexNot @ latex(not(?e), ?l) <- latex(?e, ?l_e), join(["\\neg (", ?l_e, ")"], ?l);
LatexNumEq @ latex(eq(?e_1, ?e_2), ?l) <- latex(?e_1, ?l_1), latex(?e_2, ?l_2), join([?l_1, " = ", ?l_2], ?l);
LatexIsInt @ latex(int(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Int}"], ?l);
LatexIsFloat @ latex(float(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Float}"], ?l);
LatexIsNum @ latex(num(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Num}"], ?l);
LatexIsStr @ latex(str(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Str}"], ?l);
LatexSym @ latex(?s, ?l) <- sym(?s), tostring(?s, ?l_1), join(["\\text{", ?l_1,"}"], ?l);
LatexCall @ latex(?c, ?l) <- call(?c, ?n, ?ts), nonempty(?ts), latexlist(?ts, ?lts_1), intercalate(", ", ?lts_1, ?lts_2), join(?lts_2, ?lts_3), join(["\\text{", ?n, "}", "(", ?lts_3, ")"], ?l);

@@ -1,74 +0,0 @@
PrecApp @ prec(app(?l, ?r), 100, left) <-;
PrecPlus @ prec(plus(?l, ?r), 80, either) <-;
PrecAbs @ prec(abs(?x, ?t, ?e), 0, right) <-;
PrecArr @ prec(tarr(?l, ?r), 0, right) <-;
SelectHead @ select(cons([?t, ?v], ?rest), ?default, ?v) <- ?t;
SelectTail @ select(cons([?t, ?v], ?rest), ?default, ?found) <- not(?t), select(?rest, ?default, ?found);
SelectEmpty @ select(nil, ?default, ?default) <-;
Eq @ eq(?x, ?x) <-;
ParenthAssocLeft @ parenthassoc(?a_i, left, right) <-;
ParenthAssocRight @ parenthassoc(?a_i, right, left) <-;
ParenthAssocNone @ parenthassoc(?a_i, none, ?pos) <-;
ParenthAssocNeq @ parenthassoc(?a_i, ?a_o, ?pos) <- not(eq(?a_i, ?a_o));
Parenth @ parenth(?inner, ?outer, ?pos, ?strin, ?strout) <-
prec(?inner, ?p_i, ?a_i), prec(?outer, ?p_o, ?a_o),
join(["(", ?strin, ")"], ?strinparen),
select([ [less(?p_i, ?p_o), ?strinparen], [less(?p_o, ?p_i), ?strin], [ parenthassoc(?a_i, ?a_o, ?pos), ?strinparen ] ], ?strin, ?strout);
ParenthFallback @ parenth(?inner, ?outer, ?pos, ?strin, ?strin) <-;
LatexListNil @ latexlist(nil, nil) <-;
LatexListCons @ latexlist(cons(?x, ?xs), cons(?l_x, ?l_s)) <- latex(?x, ?l_x), latexlist(?xs, ?l_s);
IntercalateNil @ intercalate(?sep, nil, nil) <-;
IntercalateConsCons @ intercalate(?sep, cons(?x_1, cons(?x_2, ?xs)), cons(?x_1, cons(?sep, ?ys))) <- intercalate(?sep, cons(?x_2, ?xs), ?ys);
IntercalateConsNil @ intercalate(?sep, cons(?x, nil), cons(?x, nil)) <-;
NonEmpty @ nonempty(cons(?x, ?xs)) <-;
LatexInt @ latex(?i, ?l) <- int(?i), tostring(?i, ?l);
LatexFloat @ latex(?f, ?l) <- float(?f), tostring(?f, ?l);
LatexStr @ latex(?s, ?l) <- str(?s), escapestring(?s, ?l_1), latexifystring(?s, ?l_2), join(["\\texttt{\"", ?l_2, "\"}"], ?l);
LatexMeta @ latex(metavariable(?l), ?l) <-;
LatexLit @ latex(lit(?i), ?l) <- latex(?i, ?l);
LatexVar @ latex(var(?s), ?l) <- latex(?s, ?l);
LatexPlus @ latex(plus(?e_1, ?e_2), ?l) <-
latex(?e_1, ?l_1), latex(?e_2, ?l_2),
parenth(?e_1, plus(?e_1, ?e_2), left, ?l_1, ?lp_1),
parenth(?e_2, plus(?e_1, ?e_2), right, ?l_2, ?lp_2),
join([?lp_1, " + ", ?lp_2], ?l);
LatexPair @ latex(pair(?e_1, ?e_2), ?l) <- latex(?e_1, ?l_1), latex(?e_2, ?l_2), join(["(", ?l_1, ", ", ?l_2, ")"], ?l);
LatexAbs @ latex(abs(?x, ?t, ?e), ?l) <- latex(?e, ?l_e), latex(?t, ?l_t), latex(?x, ?l_x), join(["\\lambda ", ?l_x, " : ", ?l_t, " . ", ?l_e], ?l);
LatexApp @ latex(app(?e_1, ?e_2), ?l) <-
latex(?e_1, ?l_1), latex(?e_2, ?l_2),
parenth(?e_1, app(?e_1, ?e_2), left, ?l_1, ?lp_1),
parenth(?e_2, app(?e_1, ?e_2), right, ?l_2, ?lp_2),
join([?lp_1, " \\enspace ", ?lp_2], ?l);
LatexTInt @ latex(tint, "\\text{tint}") <-;
LatexTStr @ latex(tstr, "\\text{tstr}") <-;
LatexTArr @ latex(tarr(?t_1, ?t_2), ?l) <-
latex(?t_1, ?l_1), latex(?t_2, ?l_2),
parenth(?t_1, tarr(?t_1, ?t_2), left, ?l_1, ?lp_1),
parenth(?t_2, tarr(?t_1, ?t_2), right, ?l_2, ?lp_2),
join([?lp_1, " \\to ", ?lp_2], ?l);
LatexTPair @ latex(tpair(?t_1, ?t_2), ?l) <- latex(?t_1, ?l_1), latex(?t_2, ?l_2), join(["(", ?l_1, ", ", ?l_2, ")"], ?l);
LatexTypeEmpty @ latex(empty, "\\varnothing") <-;
LatexTypeExtend @ latex(extend(?a, ?b, ?c), ?l) <- latex(?a, ?l_a), latex(?b, ?l_b), latex(?c, ?l_c), join([?l_a, " , ", ?l_b, " : ", ?l_c], ?l);
LatexTypeInenv @ latex(inenv(?x, ?t, ?G), ?l) <- latex(?x, ?l_x), latex(?t, ?l_t), latex(?G, ?l_G), join([?l_x, " : ", ?l_t, " \\in ", ?l_G], ?l);
LatexTypeBin @ latex(type(?e, ?t), ?l) <- latex(?e, ?l_e), latex(?t, ?l_t), join([?l_e, " : ", ?l_t], ?l);
LatexTypeTer @ latex(type(?G, ?e, ?t), ?l) <- latex(?G, ?l_G), latex(?e, ?l_e), latex(?t, ?l_t), join([?l_G, " \\vdash ", ?l_e, " : ", ?l_t], ?l);
LatexConverts @ latex(converts(?f, ?t), ?l) <- latex(?f, ?l_f), latex(?t, ?l_t), join([?l_f, " \\preceq ", ?l_t], ?l);
LatexIsInt @ latex(int(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Int}"], ?l);
LatexIsFloat @ latex(float(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Float}"], ?l);
LatexIsNum @ latex(num(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Num}"], ?l);
LatexIsStr @ latex(str(?e), ?l) <- latex(?e, ?l_e), join([?l_e, " \\in \\texttt{Str}"], ?l);
LatexSym @ latex(?s, ?l) <- sym(?s), tostring(?s, ?l_1), join(["\\text{", ?l_1,"}"], ?l);
LatexCall @ latex(?c, ?l) <- call(?c, ?n, ?ts), nonempty(?ts), latexlist(?ts, ?lts_1), intercalate(", ", ?lts_1, ?lts_2), join(?lts_2, ?lts_3), join(["\\text{", ?n, "}", "(", ?lts_3, ")"], ?l);

@@ -1,174 +0,0 @@
@import "variables.scss";
@import "mixins.scss";
.bergamot-exercise {
counter-increment: bergamot-exercise;
.bergamot-root {
border: none;
padding: 0;
margin-top: 1em;
}
.bergamot-exercise-label {
.bergamot-exercise-number::after {
content: "Exercise " counter(bergamot-exercise);
font-weight: bold;
text-decoration: underline;
}
}
.bergamot-button {
@include bordered-block;
padding: 0.25em;
padding-left: 1em;
padding-right: 1em;
background-color: inherit;
display: inline-flex;
align-items: center;
justify-content: center;
transition: 0.25s;
font-family: $font-body;
@include var(color, text-color);
&.bergamot-hidden {
display: none;
}
.feather {
margin-right: 0.5em;
}
}
.bergamot-play {
.feather { color: $primary-color; }
&:hover, &:focus {
.feather { color: lighten($primary-color, 20%); }
}
}
.bergamot-reset {
.feather { color: #0099CC; }
&:hover, &:focus {
.feather { color: lighten(#0099CC, 20%); }
}
svg {
fill: none;
}
}
.bergamot-close {
.feather { color: tomato; }
&:hover, &:focus {
.feather { color: lighten(tomato, 20%); }
}
}
.bergamot-button-group {
margin-top: 1em;
}
}
.bergamot-root {
@include bordered-block;
padding: 1em;
.bergamot-section-heading {
margin-bottom: 0.5em;
font-family: $font-body;
font-style: normal;
font-weight: bold;
font-size: 1.25em;
}
.bergamot-section {
margin-bottom: 1em;
}
textarea {
display: block;
width: 100%;
height: 10em;
resize: none;
}
input[type="text"] {
width: 100%;
@include textual-input;
}
.bergamot-rule-list {
display: flex;
flex-direction: row;
flex-wrap: wrap;
justify-content: center;
}
.bergamot-rule-list katex-expression {
margin-left: .5em;
margin-right: .5em;
flex-grow: 1;
flex-basis: 0;
}
.bergamot-rule-section {
.bergamot-rule-section-name {
text-align: center;
margin: 0.25em;
font-weight: bold;
}
}
.bergamot-proof-tree {
overflow: auto;
}
.bergamot-error {
@include bordered-block;
padding: 0.5rem;
border-color: tomato;
background-color: rgba(tomato, 0.25);
margin-top: 1rem;
}
.bergamot-selector {
button {
@include var(background-color, background-color);
@include var(color, text-color);
@include bordered-block;
padding: 0.5rem;
font-family: $font-body;
border-style: dotted;
&.active {
border-color: $primary-color;
border-style: solid;
font-weight: bold;
}
&:not(:first-child) {
border-bottom-left-radius: 0;
border-top-left-radius: 0;
}
&:not(:last-child) {
border-bottom-right-radius: 0;
border-top-right-radius: 0;
border-right-width: 0;
}
}
button.active + button {
border-left-color: $primary-color;
border-left-style: solid;
}
margin-bottom: 1rem;
}
.bergamot-no-proofs {
text-align: center;
}
}

@@ -1,56 +0,0 @@
@import "../../themes/vanilla/assets/scss/mixins.scss";
.donation-methods {
padding: 0;
border: none;
border-spacing: 0 0.5rem;
td {
padding: 0;
overflow: hidden;
&:first-child {
@include bordered-block;
text-align: right;
border-right: none;
border-top-right-radius: 0;
border-bottom-right-radius: 0;
padding-left: 0.5em;
padding-right: 0.5rem;
@include below-container-width {
@include bordered-block;
text-align: center;
border-bottom: none;
border-bottom-left-radius: 0;
border-bottom-right-radius: 0;
}
}
&:last-child {
@include bordered-block;
border-top-left-radius: 0;
border-bottom-left-radius: 0;
@include below-container-width {
@include bordered-block;
border-top-left-radius: 0;
border-top-right-radius: 0;
}
}
}
tr {
@include below-container-width {
margin-bottom: 0.5rem;
}
}
code {
width: 100%;
box-sizing: border-box;
border: none;
display: inline-block;
padding: 0.25rem;
}
}

@@ -1,11 +0,0 @@
@import "variables.scss";
@import "mixins.scss";
.assumption-number {
font-weight: bold;
}
.assumption {
@include bordered-block;
padding: 0.8rem;
}

@@ -1,430 +0,0 @@
@import "variables.scss";
body {
background-color: #1c1e26;
--text-color: white;
font-family: $font-code;
}
h1, h2, h3, h4, h5, h6 {
text-align: left;
font-family: $font-code;
}
h1::after {
content: "(writing)";
font-size: 1rem;
margin-left: 0.5em;
position: relative;
bottom: -0.5em;
color: $primary-color;
}
nav .container {
justify-content: flex-start;
a {
padding-left: 0;
margin-right: 1em;
}
}
.header-divider {
visibility: hidden;
}
hr {
height: auto;
border: none;
&::after {
content: "* * *";
color: $primary-color;
font-size: 2rem;
display: block;
text-align: center;
}
}
/* Code for the CSS glitch effect. Originally from: https://codepen.io/mattgrosswork/pen/VwprebG */
.glitch {
position: relative;
span {
animation: paths 5s step-end infinite;
font-weight: bold;
}
&::before, &::after {
content: attr(data-text);
position: absolute;
width: 110%;
z-index: -1;
}
&::before {
top: 10px;
left: 15px;
color: #e0287d;
animation: paths 5s step-end infinite, opacity 5s step-end infinite,
font 8s step-end infinite, movement 10s step-end infinite;
}
&::after {
top: 5px;
left: -10px;
color: #1bc7fb;
animation: paths 5s step-end infinite, opacity 5s step-end infinite,
font 7s step-end infinite, movement 8s step-end infinite;
}
}
@keyframes paths {
0% {
clip-path: polygon(
0% 43%,
83% 43%,
83% 22%,
23% 22%,
23% 24%,
91% 24%,
91% 26%,
18% 26%,
18% 83%,
29% 83%,
29% 17%,
41% 17%,
41% 39%,
18% 39%,
18% 82%,
54% 82%,
54% 88%,
19% 88%,
19% 4%,
39% 4%,
39% 14%,
76% 14%,
76% 52%,
23% 52%,
23% 35%,
19% 35%,
19% 8%,
36% 8%,
36% 31%,
73% 31%,
73% 16%,
1% 16%,
1% 56%,
50% 56%,
50% 8%
);
}
5% {
clip-path: polygon(
0% 29%,
44% 29%,
44% 83%,
94% 83%,
94% 56%,
11% 56%,
11% 64%,
94% 64%,
94% 70%,
88% 70%,
88% 32%,
18% 32%,
18% 96%,
10% 96%,
10% 62%,
9% 62%,
9% 84%,
68% 84%,
68% 50%,
52% 50%,
52% 55%,
35% 55%,
35% 87%,
25% 87%,
25% 39%,
15% 39%,
15% 88%,
52% 88%
);
}
30% {
clip-path: polygon(
0% 53%,
93% 53%,
93% 62%,
68% 62%,
68% 37%,
97% 37%,
97% 89%,
13% 89%,
13% 45%,
51% 45%,
51% 88%,
17% 88%,
17% 54%,
81% 54%,
81% 75%,
79% 75%,
79% 76%,
38% 76%,
38% 28%,
61% 28%,
61% 12%,
55% 12%,
55% 62%,
68% 62%,
68% 51%,
0% 51%,
0% 92%,
63% 92%,
63% 4%,
65% 4%
);
}
45% {
clip-path: polygon(
0% 33%,
2% 33%,
2% 69%,
58% 69%,
58% 94%,
55% 94%,
55% 25%,
33% 25%,
33% 85%,
16% 85%,
16% 19%,
5% 19%,
5% 20%,
79% 20%,
79% 96%,
93% 96%,
93% 50%,
5% 50%,
5% 74%,
55% 74%,
55% 57%,
96% 57%,
96% 59%,
87% 59%,
87% 65%,
82% 65%,
82% 39%,
63% 39%,
63% 92%,
4% 92%,
4% 36%,
24% 36%,
24% 70%,
1% 70%,
1% 43%,
15% 43%,
15% 28%,
23% 28%,
23% 71%,
90% 71%,
90% 86%,
97% 86%,
97% 1%,
60% 1%,
60% 67%,
71% 67%,
71% 91%,
17% 91%,
17% 14%,
39% 14%,
39% 30%,
58% 30%,
58% 11%,
52% 11%,
52% 83%,
68% 83%
);
}
76% {
clip-path: polygon(
0% 26%,
15% 26%,
15% 73%,
72% 73%,
72% 70%,
77% 70%,
77% 75%,
8% 75%,
8% 42%,
4% 42%,
4% 61%,
17% 61%,
17% 12%,
26% 12%,
26% 63%,
73% 63%,
73% 43%,
90% 43%,
90% 67%,
50% 67%,
50% 41%,
42% 41%,
42% 46%,
50% 46%,
50% 84%,
96% 84%,
96% 78%,
49% 78%,
49% 25%,
63% 25%,
63% 14%
);
}
90% {
clip-path: polygon(
0% 41%,
13% 41%,
13% 6%,
87% 6%,
87% 93%,
10% 93%,
10% 13%,
89% 13%,
89% 6%,
3% 6%,
3% 8%,
16% 8%,
16% 79%,
0% 79%,
0% 99%,
92% 99%,
92% 90%,
5% 90%,
5% 60%,
0% 60%,
0% 48%,
89% 48%,
89% 13%,
80% 13%,
80% 43%,
95% 43%,
95% 19%,
80% 19%,
80% 85%,
38% 85%,
38% 62%
);
}
1%,
7%,
33%,
47%,
78%,
93% {
clip-path: none;
}
}
@keyframes movement {
0% {
top: 0px;
left: -20px;
}
15% {
top: 10px;
left: 10px;
}
60% {
top: 5px;
left: -10px;
}
75% {
top: -5px;
left: 20px;
}
100% {
top: 10px;
left: 5px;
}
}
@keyframes opacity {
0% {
opacity: 0.1;
}
5% {
opacity: 0.7;
}
30% {
opacity: 0.4;
}
45% {
opacity: 0.6;
}
76% {
opacity: 0.4;
}
90% {
opacity: 0.8;
}
1%,
7%,
33%,
47%,
78%,
93% {
opacity: 0;
}
}
@keyframes font {
0% {
font-weight: 100;
color: #e0287d;
filter: blur(3px);
}
20% {
font-weight: 500;
color: #fff;
filter: blur(0);
}
50% {
font-weight: 300;
color: #1bc7fb;
filter: blur(2px);
}
60% {
font-weight: 700;
color: #fff;
filter: blur(0);
}
90% {
font-weight: 500;
color: #e0287d;
filter: blur(6px);
}
}

build-agda-html.rb

@@ -1,70 +0,0 @@
require "json"
require "set"
require "optparse"
require "fileutils"
# For target_dir, use absolute paths because when invoking Agda, we'll be
# using chdir.
root_path = "code"
target_dir = File.expand_path "code"
data_file = "data/submodules.json"
OptionParser.new do |opts|
opts.banner = "Usage: build-agda-html.rb [options]"
opts.on("--root-path=PATH", "Search for Agda project folders in the given location") do |f|
root_path = f
end
opts.on("--target-dir=PATH", "Generate HTML files into the given directory") do |f|
target_dir = File.expand_path f
end
opts.on("--data-file=FILE", "Specify the submodules.json that encodes nested submodule structure") do |f|
data_file = f
end
end.parse!
files = ARGV
code_paths = Dir.entries(root_path).select do |f|
File.directory?(File.join(root_path, f)) and f != '.' and f != '..'
end.to_set
code_paths += JSON.parse(File.read(data_file)).keys if File.exist? data_file
# Extending code_paths from submodules.json means that nested Agda modules
# have their root dir correctly set. (A standalone sketch of this prefix
# matching follows this file.)
max_path = ->(path) {
code_paths.max_by do |code_path|
count = 0
path.chars.zip(code_path.chars) do |c1, c2|
break unless c1 == c2
count += 1
end
next count
end
}
files_for_paths = {}
Dir.glob("**/*.agda", base: root_path) do |agda_file|
best_path = max_path.call(agda_file)
files_for_path = files_for_paths.fetch(best_path) do
files_for_paths[best_path] = []
end
files_for_path << agda_file[best_path.length + File::SEPARATOR.length..-1]
end
original_wd = Dir.getwd
files_for_paths.each do |path, files|
Dir.chdir(original_wd)
Dir.chdir(File.join(root_path, path))
html_dir = File.join [target_dir, path, "html"]
FileUtils.mkdir_p html_dir
files.each do |file|
command = "#{ARGV[0]} #{file} --html --html-dir=#{html_dir}"
puts command
puts `#{command}`
# Allow some programs to fail (e.g., IO.agda in SPA without --guardedness)
# fail unless $? == 0
end
end
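
A standalone sketch of the longest-common-prefix selection that max_path performs above, showing why a nested submodule's root wins over its parent. The agda-spa directory corresponds to the code/agda-spa submodule in .gitmodules; the nested agda-spa/vendor/lib path and the .agda file name are invented for illustration.

# Pick the candidate root sharing the longest character prefix with a
# file's path, exactly as max_path does above.
code_paths = ["agda-spa", "agda-spa/vendor/lib"]
max_path = ->(path) {
  code_paths.max_by do |code_path|
    count = 0
    path.chars.zip(code_path.chars) do |c1, c2|
      break unless c1 == c2
      count += 1
    end
    next count
  end
}

puts max_path.call("agda-spa/vendor/lib/Data/Maybe.agda")
# => agda-spa/vendor/lib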

@@ -1,87 +0,0 @@
open import Agda.Primitive using (Level; lsuc)
open import Relation.Binary.PropositionalEquality using (_≡_)

variable
  a : Level
  A : Set a

module FirstAttempt where
  record Semigroup (A : Set a) : Set a where
    field
      _∙_ : A → A → A
      isAssociative : (a₁ a₂ a₃ : A) → a₁ ∙ (a₂ ∙ a₃) ≡ (a₁ ∙ a₂) ∙ a₃

  record Monoid (A : Set a) : Set a where
    field semigroup : Semigroup A
    open Semigroup semigroup public

    field
      zero : A
      isIdentityLeft : (a : A) → zero ∙ a ≡ a
      isIdentityRight : (a : A) → a ∙ zero ≡ a

  record ContrivedExample (A : Set a) : Set a where
    field
      -- first property
      monoid : Monoid A
      -- second property; Semigroup is a stand-in.
      semigroup : Semigroup A
      operationsEqual : Monoid._∙_ monoid ≡ Semigroup._∙_ semigroup

module SecondAttempt where
  record IsSemigroup {A : Set a} (_∙_ : A → A → A) : Set a where
    field isAssociative : (a₁ a₂ a₃ : A) → a₁ ∙ (a₂ ∙ a₃) ≡ (a₁ ∙ a₂) ∙ a₃

  record IsMonoid {A : Set a} (zero : A) (_∙_ : A → A → A) : Set a where
    field
      isSemigroup : IsSemigroup _∙_
      isIdentityLeft : (a : A) → zero ∙ a ≡ a
      isIdentityRight : (a : A) → a ∙ zero ≡ a
    open IsSemigroup isSemigroup public

  record IsContrivedExample {A : Set a} (zero : A) (_∙_ : A → A → A) : Set a where
    field
      -- first property
      monoid : IsMonoid zero _∙_
      -- second property; Semigroup is a stand-in.
      semigroup : IsSemigroup _∙_

  record Semigroup (A : Set a) : Set a where
    field
      _∙_ : A → A → A
      isSemigroup : IsSemigroup _∙_

  record Monoid (A : Set a) : Set a where
    field
      zero : A
      _∙_ : A → A → A
      isMonoid : IsMonoid zero _∙_

module ThirdAttempt {A : Set a} (_∙_ : A → A → A) where
  record IsSemigroup : Set a where
    field isAssociative : (a₁ a₂ a₃ : A) → a₁ ∙ (a₂ ∙ a₃) ≡ (a₁ ∙ a₂) ∙ a₃

  record IsMonoid (zero : A) : Set a where
    field
      isSemigroup : IsSemigroup
      isIdentityLeft : (a : A) → zero ∙ a ≡ a
      isIdentityRight : (a : A) → a ∙ zero ≡ a
    open IsSemigroup isSemigroup public

  record IsContrivedExample (zero : A) : Set a where
    field
      -- first property
      monoid : IsMonoid zero
      -- second property; Semigroup is a stand-in.
      semigroup : IsSemigroup