// syntax.oak — Oak standard library: tokenizer, parser, and pretty-printer
// for Oak source code.
// (Docs-site navigation residue removed: "← Standard library", "See on GitHub ↗".)
{
default: default
fromHex: fromHex
range: range
slice: slice
append: append
contains?: contains?
map: map
each: each
last: last
take: take
first: first
filter: filter
reduce: reduce
} := import('std')
{
digit?: digit?
word?: word?
space?: space?
cut: cut
contains?: strContains?
join: join
replace: replace
startsWith?: startsWith?
trimStart: trimStart
trimEnd: trimEnd
trim: trim
} := import('str')
{
min: min
max: max
} := import('math')
{
format: format
printf: printf
} := import('fmt')
// Report whether the source text begins with a '#!' shebang line.
fn shebang?(text) startsWith?(text, '#!')
// Render a [index, line, col] position triple as '[line:col]'.
// The byte index (pos.0) is not shown.
fn renderPos(pos) {
	line := string(pos.1)
	col := string(pos.2)
	'[' + line + ':' + col + ']'
}
// Render a token for diagnostics: 'type [line:col]', including the token's
// payload in parentheses when it carries one (identifiers, literals, comments).
fn renderToken(token) if token.val = ? {
	true -> format('{{ 0 }} {{ 1 }}', string(token.type), renderPos(token.pos))
	_ -> format('{{ 0 }}({{ 1 }}) {{ 2 }}', string(token.type), token.val, renderPos(token.pos))
}
// Tokenizer(source) — factory returning a stateful tokenizer closure over the
// raw source string. Cursor state (index/line/col) lives in this closure; the
// returned object exposes `tokenize` and the reusable `readUntilChar`.
fn Tokenizer(source) {
index := 0
line := 1
col := 1
// TokenAt constructs a token record with an explicit [index, line, col]
// position. `val` may be omitted (then ?/null) for punctuation tokens.
fn TokenAt(type, pos, val) {
type: type
val: val
pos: pos
}
// Token builds a token at the current cursor position.
fn Token(type, val) TokenAt(type, [index, line, col], val)
fn eof? index = len(source)
fn peek source.(index)
// peekAhead looks n chars past the cursor; out-of-range reads yield ' '
// so callers can compare without null checks.
fn peekAhead(n) if index + n >= len(source) {
true -> ' '
_ -> source.(index + n)
}
// next consumes and returns one char, maintaining line/col counters.
fn next {
char := source.(index)
if index < len(source) -> index <- index + 1
if char {
'\n' -> {
line <- line + 1
col <- 1
}
_ -> col <- col + 1
}
char
}
// back un-consumes one char, reversing line/col bookkeeping.
// NOTE(review): stepping back across a '\n' decrements `line` but cannot
// restore `col` to the previous line's width, so col may be stale until
// the next newline — confirm this is acceptable for position reporting.
fn back {
if index > 0 -> index <- index - 1
if source.(index) {
'\n' -> line <- line - 1
_ -> col <- col - 1
}
}
// readUntilChar consumes chars up to (not including) the next `c` or EOF.
fn readUntilChar(c) {
fn sub(acc) if !eof?() & peek() != c {
true -> sub(acc << next())
_ -> acc
}
sub('')
}
// readValidIdentifier consumes the longest run of identifier characters
// (word chars plus _ ? !), leaving the cursor on the first non-match.
fn readValidIdentifier {
fn sub(acc) if eof?() {
true -> acc
_ -> {
c := next()
if word?(c) | c = '_' | c = '?' | c = '!' {
true -> sub(acc << c)
_ -> {
back()
acc
}
}
}
}
sub('')
}
// readValidNumeral consumes digits with at most one decimal point.
fn readValidNumeral {
sawDot? := false
fn sub(acc) if eof?() {
true -> acc
_ -> {
c := next()
if {
digit?(c) -> sub(acc << c)
c = '.' & !sawDot? -> {
sawDot? <- true
sub(acc << c)
}
_ -> {
back()
acc
}
}
}
}
sub('')
}
// nextToken reads exactly one token starting at the current cursor.
// Assumes the caller has already skipped any leading whitespace.
fn nextToken {
pos := [index, line, col]
if c := next() {
',' -> TokenAt(:comma, pos)
// '.' may begin an ellipsis '...'
'.' -> if peek() = '.' & peekAhead(1) = '.' {
true -> {
next()
next()
TokenAt(:ellipsis, pos)
}
_ -> TokenAt(:dot, pos)
}
'(' -> TokenAt(:leftParen, pos)
')' -> TokenAt(:rightParen, pos)
'[' -> TokenAt(:leftBracket, pos)
']' -> TokenAt(:rightBracket, pos)
'{' -> TokenAt(:leftBrace, pos)
'}' -> TokenAt(:rightBrace, pos)
// two-char operators are disambiguated by one char of lookahead
':' -> if peek() {
'=' -> {
next()
TokenAt(:assign, pos)
}
_ -> TokenAt(:colon, pos)
}
'<' -> if peek() {
'<' -> {
next()
TokenAt(:pushArrow, pos)
}
'-' -> {
next()
TokenAt(:nonlocalAssign, pos)
}
'=' -> {
next()
TokenAt(:leq, pos)
}
_ -> TokenAt(:less, pos)
}
'?' -> TokenAt(:qmark, pos)
'!' -> if peek() {
'=' -> {
next()
TokenAt(:neq, pos)
}
_ -> TokenAt(:exclam, pos)
}
'+' -> TokenAt(:plus, pos)
'-' -> if peek() {
'>' -> {
next()
TokenAt(:branchArrow, pos)
}
_ -> TokenAt(:minus, pos)
}
'*' -> TokenAt(:times, pos)
'/' -> if peek() {
// '//' begins a line comment: capture its text (sans trailing
// whitespace); whitespace-only comments normalize to ''
'/' -> {
next()
commentString := readUntilChar('\n') |> trimEnd()
if commentString |> trim() = '' -> commentString <- ''
TokenAt(:comment, pos, commentString)
}
_ -> TokenAt(:divide, pos)
}
'%' -> TokenAt(:modulus, pos)
'^' -> TokenAt(:xor, pos)
'&' -> TokenAt(:and, pos)
'|' -> if peek() {
'>' -> {
next()
TokenAt(:pipeArrow, pos)
}
_ -> TokenAt(:or, pos)
}
'>' -> if peek() {
'=' -> {
next()
TokenAt(:geq, pos)
}
_ -> TokenAt(:greater, pos)
}
'=' -> TokenAt(:eq, pos)
// string literal: read to the closing quote, keeping backslash
// escapes verbatim — they are decoded later, in the parser
'\'' -> {
fn sub(payload) if charInString := next() {
?, '\'' -> payload
'\\' -> if c := next() {
? -> payload
_ -> sub(payload << '\\' << c)
}
_ -> sub(payload << charInString)
}
TokenAt(:stringLiteral, pos, sub(''))
}
_ -> if {
digit?(c) -> TokenAt(:numberLiteral, pos, c << readValidNumeral())
// otherwise: keyword or identifier
_ -> if payload := c << readValidIdentifier() {
'_' -> TokenAt(:underscore, pos)
'if' -> TokenAt(:ifKeyword, pos)
'fn' -> TokenAt(:fnKeyword, pos)
'with' -> TokenAt(:withKeyword, pos)
'true' -> TokenAt(:trueLiteral, pos)
'false' -> TokenAt(:falseLiteral, pos)
_ -> TokenAt(:identifier, pos, payload)
}
}
}
}
// tokenize — main loop: skip an optional shebang line, then scan tokens,
// emitting :newline markers and auto-inserting :comma tokens so the parser
// can treat commas as the only expression separator.
fn tokenize {
tokens := []
// ignore a leading shebang line entirely
if peek() = '#' & peekAhead(1) = '!' -> {
readUntilChar('\n')
if !eof?() -> next()
}
// eat leading whitespace, recording line breaks as :newline tokens
fn eatSpace if space?(sp := peek()) -> {
if sp = '\n' -> tokens << Token(:newline)
next()
eatSpace()
}
eatSpace()
// lastTok tracks the previous significant token for comma insertion;
// seeded with :comma so nothing is inserted before the first token
lastTok := Token(:comma)
fn sub {
nextTok := nextToken()
// auto-insert a comma before a closing delimiter, unless the previous
// token was an opener or already a comma
if !(
[:leftParen, :leftBracket, :leftBrace, :comma] |> contains?(lastTok.type)
) & [:rightParen, :rightBracket, :rightBrace] |> contains?(nextTok.type) -> tokens << TokenAt(:comma, nextTok.pos)
tokens << nextTok
// comments are transparent to comma insertion: pretend the previous
// significant token is still current.
// NOTE(review): uses := (declare) where <- is used elsewhere (L265-style);
// relies on same-scope redeclaration acting as rebinding — confirm.
if nextTok.type = :comment -> nextTok := lastTok
// skip whitespace; at each '\n', insert a comma unless the previous
// token cannot end an expression (openers, operators, keywords, ...)
fn eatSpaceAutoInsertComma if space?(peek()) -> {
if peek() {
'\n' -> {
if nextTok.type {
:comma, :leftParen, :leftBracket, :leftBrace
:plus, :minus, :times, :divide, :modulus, :xor
:and, :or, :exclam, :greater, :less, :eq, :geq
:leq, :assign, :nonlocalAssign, :dot, :colon
:fnKeyword, :ifKeyword, :withKeyword
:pipeArrow, :branchArrow, :pushArrow -> ?
_ -> {
nextTok <- Token(:comma)
tokens << nextTok
}
}
tokens << Token(:newline)
}
}
next()
eatSpaceAutoInsertComma()
}
eatSpaceAutoInsertComma()
if nextTok.type {
:comment -> ?
_ -> lastTok <- nextTok
}
if !eof?() -> sub()
}
if !eof?() -> sub()
// guarantee the stream ends with a comma so the parser's
// "expression, comma" loop terminates cleanly
if lastTok.type {
:comma -> ?
_ -> tokens << TokenAt(:comma, [
len(source)
line
col
])
}
tokens
}
// public interface of the tokenizer closure
{
readUntilChar: readUntilChar
tokenize: tokenize
}
}
fn tokenize(text) Tokenizer(text).tokenize()
// Parser(tokens) — factory returning { parse } over a token stream produced
// by Tokenizer. Newline and comment tokens are filtered out up front; the
// commas auto-inserted by the tokenizer delimit expressions instead.
fn Parser(tokens) {
index := 0
// stack of minimum binary-operator precedences; parseNode only consumes
// operators binding tighter than the top of this stack
minBinaryPrec := [0]
tokens := tokens |> filter(fn(tok) if tok.type {
:newline, :comment -> false
_ -> true
})
// error constructs an :error node carrying a message and source position
fn error(msg, pos) {
type: :error
error: msg
pos: pos
}
fn lastMinPrec minBinaryPrec.(len(minBinaryPrec) - 1)
fn pushMinPrec(prec) minBinaryPrec << prec
fn popMinPrec minBinaryPrec <- slice(minBinaryPrec, 0, len(minBinaryPrec) - 1)
fn eof? index = len(tokens)
fn peek tokens.(index)
// peekAhead returns a synthetic comma token when out of range.
// NOTE(review): bound check uses > rather than >= (cf. Tokenizer's
// peekAhead), so index + n = len(tokens) reads past the end and yields ?;
// no call site is visible here, so this is latent — confirm before use.
fn peekAhead(n) if index + n > len(tokens) {
true -> { type: :comma }
_ -> tokens.(index + n)
}
fn next {
tok := tokens.(index)
if index < len(tokens) -> index <- index + 1
tok
}
fn back if index > 0 -> index <- index - 1
// lastTokenPos — position of the final token, or ? for an empty stream;
// used when reporting errors at end of input
fn lastTokenPos if lastTok := last(tokens) {
? -> ?
_ -> lastTok.pos
}
// expect consumes the next token, erroring unless it has the given type
fn expect(type) if eof?() {
true -> error(format('Unexpected end of input, expected {{0}}', type), lastTokenPos())
_ -> {
nextTok := next()
if nextTok.type {
type -> nextTok
_ -> error(format('Unexpected token {{0}}, expected {{1}}', renderToken(nextTok), type), nextTok.pos)
}
}
}
// readUntilTokenType — consume and collect tokens up to (but not including)
// the next token of the given type, or until end of input.
// Note: the local `tokens` shadows the Parser's token stream; it is the
// accumulator that this function returns.
fn readUntilTokenType(type) {
	tokens := []
	// Fixed: this previously called `eof()`, which is not defined anywhere
	// in this scope — the end-of-stream predicate declared above is `eof?`,
	// as used by every other helper. The bare `eof()` would crash at runtime.
	fn sub if !eof?() & peek().type != type {
		true -> {
			tokens << next()
			sub()
		}
		_ -> tokens
	}
	sub()
}
// notError — error-propagation helper: if x is an :error node, return it
// unchanged; otherwise apply the continuation to x. Used pervasively in
// continuation-passing style (usually via `with`).
fn notError(x, withNotErr) if x {
{ type: :error, error: _, pos: _ } -> x
_ -> withNotErr(x)
}
// parseAssignment — after a parsed left-hand side, consume := or <- and
// parse the right-hand side; otherwise return `left` untouched.
fn parseAssignment(left) if peek().type {
:assign, :nonlocalAssign -> {
nxt := next()
node := {
type: :assignment
tok: nxt
local?: nxt.type = :assign
left: left
}
with notError(right := parseNode()) fn {
node.right := right
node
}
}
_ -> left
}
// parseUnit — parse one atomic unit: literal, atom, list, object/block,
// function, if-expression, with-expression, or parenthesized block.
fn parseUnit if eof?() {
true -> error('Unexpected end of input', lastTokenPos())
_ -> {
tok := next()
if tok.type {
:qmark -> { type: :null, tok: tok }
:stringLiteral -> {
type: :string
tok: tok
// decode the backslash escapes (\t \n \r \f, \xNN hex) that the
// tokenizer left verbatim; unrecognized escapes keep the raw char
val: {
verbatim := tok.val
fn sub(parsed, i) if c := verbatim.(i) {
? -> parsed
'\\' -> if escapedChar := verbatim.(i + 1) {
't' -> sub(parsed << '\t', i + 2)
'n' -> sub(parsed << '\n', i + 2)
'r' -> sub(parsed << '\r', i + 2)
'f' -> sub(parsed << '\f', i + 2)
'x' -> if c1 := verbatim.(i + 2) {
? -> sub(parsed << escapedChar, i + 2)
_ -> if c2 := verbatim.(i + 3) {
? -> sub(parsed << escapedChar << c1, i + 3)
_ -> if code := fromHex(c1 + c2) {
? -> sub(parsed << escapedChar << c1 << c2, i + 4)
_ -> sub(parsed << char(code), i + 4)
}
}
}
_ -> sub(parsed << escapedChar, i + 2)
}
_ -> sub(parsed << c, i + 1)
}
sub('', 0)
}
}
// numbers: a '.' in the literal selects float parsing, else integer
:numberLiteral -> if tok.val |> strContains?('.') {
true -> if parsed := float(tok.val) {
? -> error(format('Could not parse floating point number {{0}}', tok.val), tok.pos)
_ -> {
type: :float
tok: tok
val: parsed
}
}
_ -> if parsed := int(tok.val) {
? -> error(format('Could not parse integer number {{0}}', tok.val), tok.pos)
_ -> {
type: :int
tok: tok
val: parsed
}
}
}
:trueLiteral -> {
type: :bool
tok: tok
val: true
}
:falseLiteral -> {
type: :bool
tok: tok
val: false
}
// atoms — :name; keywords and bool literals are valid atom names
:colon -> if peek().type {
:identifier -> {
type: :atom
tok: tok
val: next().val
}
:ifKeyword -> {
next()
{ type: :atom, tok: tok, val: 'if' }
}
:fnKeyword -> {
next()
{ type: :atom, tok: tok, val: 'fn' }
}
:withKeyword -> {
next()
{ type: :atom, tok: tok, val: 'with' }
}
:trueLiteral -> {
next()
{ type: :atom, tok: tok, val: 'true' }
}
:falseLiteral -> {
next()
{ type: :atom, tok: tok, val: 'false' }
}
_ -> error(format('Expected identifier after ":", got {{0}}', renderToken(peek())), peek().pos)
}
// list literal [a, b, ...]
:leftBracket -> {
pushMinPrec(0)
itemNodes := []
fn sub if eof?() {
true -> error('Unexpected end of input inside list', lastTokenPos())
_ -> if peek().type {
:rightBracket -> ?
_ -> with notError(node := parseNode()) fn {
with notError(err := expect(:comma)) fn {
itemNodes << node
sub()
}
}
}
}
with notError(sub()) fn {
with notError(err := expect(:rightBracket)) fn {
popMinPrec()
{
type: :list
tok: tok
elems: itemNodes
}
}
}
}
// '{' is ambiguous: object literal (first expr followed by ':') or
// block (first expr followed by ','); bare {} is the empty object
:leftBrace -> {
pushMinPrec(0)
if peek().type {
:rightBrace -> {
next()
popMinPrec()
{
type: :object
tok: tok
entries: []
}
}
_ -> with notError(firstExpr := parseNode()) fn if eof?() {
true -> error('Unexpected end of input inside block or object', lastTokenPos())
_ -> if peek().type {
// object: parse key: val pairs until the closing brace
:colon -> {
next()
with notError(valExpr := parseNode()) fn {
with notError(expect(:comma)) fn {
entries := [{ key: firstExpr, val: valExpr }]
fn sub if !eof?() -> if peek().type {
:rightBrace -> ?
_ -> with notError(key := parseNode()) fn {
with notError(expect(:colon)) fn {
with notError(val := parseNode()) fn {
with notError(expect(:comma)) fn {
entries << { key: key, val: val }
sub()
}
}
}
}
}
with notError(sub()) fn {
with notError(expect(:rightBrace)) fn {
popMinPrec()
{
type: :object
tok: tok
entries: entries
}
}
}
}
}
}
// block: parse comma-separated expressions to the closing brace
_ -> with notError(expect(:comma)) fn {
exprs := [firstExpr]
fn sub if eof?() {
true -> error('Unexpected end of input inside block or object', lastTokenPos())
_ -> if peek().type {
:rightBrace -> ?
_ -> with notError(expr := parseNode()) fn {
with notError(expect(:comma)) fn {
exprs << expr
sub()
}
}
}
}
with notError(sub()) fn {
with notError(expect(:rightBrace)) fn {
popMinPrec()
{
type: :block
tok: tok
exprs: exprs
}
}
}
}
}
}
}
}
// function literal: fn [name] [(args...)] body
:fnKeyword -> {
pushMinPrec(0)
name := if peek().type {
:identifier -> next().val
_ -> ''
}
args := []
restArg := ''
// parseBody — shared tail for both forms (with/without arg list);
// an empty object body {} is reinterpreted as an empty block
fn parseBody with notError(body := parseNode()) fn {
if body {
{ type: :object, tok: _, entries: [] } -> body <- {
type: :block
tok: body.tok
exprs: []
}
}
popMinPrec()
{
type: :function
name: name
tok: tok
args: args
restArg: restArg
body: body
}
}
if peek().type {
:leftParen -> {
next()
fn sub if !eof?() -> if peek().type {
:rightParen -> ?
_ -> {
arg := expect(:identifier)
if arg.type {
// not an identifier: accept _ as a throwaway argument
:error -> {
back()
with notError(expect(:underscore)) fn {
args << '_'
with notError(expect(:comma)) fn {
sub()
}
}
}
_ -> if peek().type {
// name... declares the rest (variadic) argument
:ellipsis -> {
restArg <- arg.val
next()
with notError(expect(:comma)) fn {
sub()
}
}
_ -> {
args << arg.val
with notError(expect(:comma)) fn {
sub()
}
}
}
}
}
}
with notError(sub()) fn {
with notError(expect(:rightParen)) fn {
parseBody()
}
}
}
_ -> parseBody()
}
}
:underscore -> {
type: :empty
tok: tok
}
:identifier -> {
type: :identifier
tok: tok
val: tok.val
}
// unary - and !
:minus, :exclam -> with notError(right := parseSubNode()) fn {
type: :unary
tok: tok
op: tok.type
right: right
}
// if expression; `if { ... }` sugar gets an implicit `true` condition
:ifKeyword -> {
pushMinPrec(0)
condNode := if peek().type {
:leftBrace -> {
type: :bool
val: true
tok: tok
}
_ -> parseNode()
}
if eof?() {
true -> error('Unexpected end of input in if expression', lastTokenPos())
_ -> if peek().type {
// shorthand: if cond -> body (single implicit true target)
:branchArrow -> {
arrowTok := next()
with notError(body := parseNode()) fn {
{
type: :ifExpr
tok: tok
cond: condNode
branches: [{
type: :ifBranch
target: {
type: :bool
val: true
tok: arrowTok
}
body: body
}]
}
}
}
_ -> with notError(condNode) fn {
with notError(expect(:leftBrace)) fn {
// each branch may list several comma-separated targets; the
// branch body is duplicated across all of its targets
fn subBranch(branches) if eof?() {
false -> if peek().type {
:rightBrace -> branches
_ -> {
fn subTarget(targets) if eof?() {
true -> targets
_ -> with notError(target := parseNode()) fn if peek().type {
:branchArrow -> targets << target
_ -> with notError(expect(:comma)) fn {
subTarget(targets << target)
}
}
}
with notError(targets := subTarget([])) fn {
with notError(expect(:branchArrow)) fn {
with notError(body := parseNode()) fn {
with notError(expect(:comma)) fn {
subBranch(branches |> append(targets |> with map() fn(target) {
type: :ifBranch
target: target
body: body
}))
}
}
}
}
}
}
_ -> branches
}
with notError(branches := subBranch([])) fn {
with notError(expect(:rightBrace)) fn {
popMinPrec()
{
type: :ifExpr
tok: tok
cond: condNode
branches: branches
}
}
}
}
}
}
}
}
// with expr: `with f(a) b` appends b as the call's last argument
:withKeyword -> {
pushMinPrec(0)
with notError(base := parseNode()) fn if base.type {
:fnCall -> with notError(lastArg := parseNode()) fn {
popMinPrec()
base.args << lastArg
base
}
_ -> error(format('with keyword should be followed by a fn call, found {{0}}', base), tok.pos)
}
}
// parenthesized block: ( e1, e2, ... )
:leftParen -> {
pushMinPrec(0)
fn subExpr(exprs) if eof?() {
true -> error('Unexpected end of input inside block', lastTokenPos())
_ -> if peek().type {
:rightParen -> exprs
_ -> with notError(expr := parseNode()) fn {
with notError(expect(:comma)) fn {
subExpr(exprs << expr)
}
}
}
}
with notError(exprs := subExpr([])) fn {
with notError(expect(:rightParen)) fn {
popMinPrec()
{
type: :block
tok: tok
exprs: exprs
}
}
}
}
_ -> error(format('Unexpected token {{0}} at start of unit', renderToken(tok)), tok.pos)
}
}
}
// infixOpPrecedence — binding power of each binary operator; -1 for
// non-operators so the binary-expression loop stops consuming.
fn infixOpPrecedence(op) if op {
:plus, :minus -> 40
:times, :divide -> 50
:modulus -> 80
:eq, :greater, :less, :geq, :leq, :neq -> 30
:and -> 20
:xor -> 15
:or -> 10
:pushArrow -> 1
_ -> -1
}
// parseSubNode — a unit plus any trailing property accesses (a.b) and
// function calls (f(x)), which bind tighter than binary operators.
fn parseSubNode {
pushMinPrec(0)
with notError(node := parseUnit()) fn {
fn sub if !eof?() -> if peek().type {
:dot -> {
nxt := next()
with notError(right := parseUnit()) fn {
node <- {
type: :propertyAccess
tok: nxt
left: node
right: right
}
sub()
}
}
:leftParen -> {
nxt := next()
args := []
restArg := ?
fn subArg if !eof?() -> if peek().type {
:rightParen -> with notError(expect(:rightParen)) fn {}
_ -> with notError(arg := parseNode()) fn if eof?() {
true -> error('Unexpected end of input inside argument list', lastTokenPos())
_ -> if peek().type {
// arg... spreads a list into the rest of the call
:ellipsis -> {
next()
with notError(expect(:comma)) fn {
restArg <- arg
subArg()
}
}
:comma -> {
next()
args << arg
subArg()
}
_ -> error(format('Expected comma after arg in argument list, got {{0}}', peek().type), peek().pos)
}
}
}
with notError(subArg()) fn {
node <- {
type: :fnCall
function: node
args: args
restArg: restArg
tok: nxt
}
sub()
}
}
}
with notError(sub()) fn {
popMinPrec()
node
}
}
}
// parseNode — a full expression: a sub-node, then any assignments, binary
// operators (precedence climbing against the minBinaryPrec stack), and
// |> pipes, which rewrite the right-hand call to take the left operand
// as its first argument.
fn parseNode with notError(node := parseSubNode()) fn {
fn sub if !eof?() -> if peek().type {
:comma -> ?
:assign, :nonlocalAssign -> node <- parseAssignment(node)
:plus, :minus, :times, :divide, :modulus, :xor, :and, :or
:pushArrow, :greater, :less, :eq, :geq, :leq, :neq -> {
minPrec := lastMinPrec()
fn subBinary if eof?() {
true -> error('Incomplete binary expression', lastTokenPos())
_ -> {
peeked := peek()
op := peeked.type
prec := infixOpPrecedence(op)
// only consume operators binding tighter than the enclosing
// context; leave weaker ones for the outer parseNode
if prec > minPrec -> {
next()
if eof?() {
true -> error(format('Incomplete binary expression with {{0}}', { type: op }), peek().pos)
_ -> {
pushMinPrec(prec)
with notError(right := parseNode()) fn {
popMinPrec()
node <- {
type: :binary
tok: peeked
op: op
left: node
right: right
}
subBinary()
}
}
}
}
}
}
with notError(subBinary()) fn {
node
}
}
:pipeArrow -> {
pipe := next()
with notError(pipeRight := parseSubNode()) fn if pipeRight.type {
:fnCall -> {
pipeRight.args := append([node], pipeRight.args)
node <- pipeRight
sub()
}
_ -> error(format('Expected function call after |>, got {{0}}', pipeRight), pipe.pos)
}
}
}
with notError(sub()) fn {
node
}
}
// public interface: parse the whole stream into a list of top-level nodes,
// each terminated by a comma; returns an :error node on the first failure
{
parse: fn {
nodes := []
fn sub if !eof?() -> with notError(node := parseNode()) fn {
with notError(expect(:comma)) fn {
nodes << node
sub()
}
}
with notError(sub()) fn {
nodes
}
}
}
}
// Parse Oak source text into a list of AST nodes (or an :error node on the
// first parse failure).
fn parse(text) Parser(tokenize(text)).parse()
// Printer(tokens) — pretty-printer: re-renders a token stream (including its
// :newline and :comment tokens) as canonically formatted Oak source text.
fn Printer(tokens) {
// tabs(n) — a string of n tab characters (empty for n <= 0)
fn tabs(n) if {
n > 0 -> tabs(n - 1) << '\t'
_ -> ''
}
// render — the canonical source text of a single token
fn render(token) if token.type {
:comment -> '//' + token.val
:comma -> ','
:dot -> '.'
:leftParen -> '('
:rightParen -> ')'
:leftBracket -> '['
:rightBracket -> ']'
:leftBrace -> '{'
:rightBrace -> '}'
:assign -> ':='
:nonlocalAssign -> '<-'
:pipeArrow -> '|>'
:branchArrow -> '->'
:pushArrow -> '<<'
:colon -> ':'
:ellipsis -> '...'
:qmark -> '?'
:exclam -> '!'
:plus -> '+'
:minus -> '-'
:times -> '*'
:divide -> '/'
:modulus -> '%'
:xor -> '^'
:and -> '&'
:or -> '|'
:greater -> '>'
:less -> '<'
:eq -> '='
:geq -> '>='
:leq -> '<='
:neq -> '!='
:ifKeyword -> 'if'
:fnKeyword -> 'fn'
:withKeyword -> 'with'
:underscore -> '_'
:identifier -> token.val
:trueLiteral -> 'true'
:falseLiteral -> 'false'
:stringLiteral -> '\'' << token.val << '\''
:numberLiteral -> token.val
// fallback: report and stringify unrecognized tokens
_ -> {
printf('Unknown token {{0}}', token)
string(token)
}
}
// connectingToken? — tokens after which an expression clearly continues,
// so a following line is treated as a hanging continuation
fn connectingToken?(tokenType) if tokenType {
:assign
:nonlocalAssign
:pipeArrow
:branchArrow
:pushArrow
:colon
:plus, :minus, :times, :divide, :modulus
:xor, :and, :or
:greater, :less, :eq, :geq, :leq, :neq -> true
_ -> false
}
// NOTE: shadows std's `last` imported at the top of this file;
// returns the final item of a list (? when empty)
fn last(list) list.(len(list) - 1)
// print — walk the token stream, accumulating `lines` (rendered text) and
// `indents` (per-line indent levels), then join with tabs applied
fn print {
lines := ['']
indents := []
// curr is the running indent delta; currs collects its values within
// the current line so indentLine can pick the line's level
curr := 0
currs := [0]
hanging? := false
// add — append rendered text `s` to the current line and shift the
// running indent by `tabs` (+1 after an opener, -1 on a closer)
fn add(s, tabs) {
if last(lines) |> trim('\t') {
'' -> last(lines) << trimStart(s)
_ -> last(lines) << s
}
curr <- curr + tabs
currs << curr
}
// purelyDescendingPrefix — longest strictly descending prefix of the
// list; used to recognize lines that only close brackets
fn purelyDescendingPrefix(list) if len(list) {
0 -> list
_ -> {
fn sub(i) if {
i = len(list) -> list |> take(i)
list.(i - 1) > list.(i) -> sub(i + 1)
_ -> list |> take(i)
}
sub(1)
}
}
// indentLine — compute the finished line's indent level and update the
// hanging-continuation flag for the next line
fn indentLine(lastType) {
indent := min(currs...)
if {
{
prefix := purelyDescendingPrefix(currs)
len(prefix) <= 1 & default(prefix.0, 0) > indent
}
hanging? -> indent <- indent + 1
}
hanging? <- if {
connectingToken?(lastType)
lastType = :dot -> true
_ -> false
}
indent
}
tokens |> with each() fn(token, i) {
nextType := default(tokens.(i + 1), { type: :newline }).type
// find the previous two significant token types, skipping over
// comment/newline pairs so comments don't disturb spacing decisions
[lastLastType, lastType] := if i {
0 -> [:newline, :newline]
1 -> [:newline, tokens.(i - 1).type]
_ -> {
fn sub(prev) if prev {
-2, -1, 0 -> prev
_ -> if [tokens.(prev).type, tokens.(prev - 1).type] {
[:newline, :comment]
[:comment, :newline] -> sub(prev - 2)
[:comment, _] -> sub(prev - 1)
_ -> prev
}
}
prev := sub(i - 1) |> max(0)
[
if prev {
0 -> :newline
_ -> tokens.(prev - 1).type
}
tokens.(prev).type
]
}
}
// spacing/indent rules keyed on [previous, current, next] token types
if [lastType, token.type, nextType] {
[_, :newline, _] -> {
indents << indentLine(lastType)
currs <- [curr]
lines << ''
}
[_, :dot, _] -> add('.', 0)
[_, :leftParen, _] -> if {
connectingToken?(lastType)
lastType = :comma
lastType = :ifKeyword
lastType = :withKeyword -> add(' ' << render(token), 1)
_ -> add(render(token), 1)
}
[_, :leftBracket, _]
[_, :leftBrace, _] -> if lastType {
:leftParen
:leftBracket -> add(render(token), 1)
_ -> add(' ' << render(token), 1)
}
[:newline, :rightParen, _]
[:newline, :rightBracket, _]
[:newline, :rightBrace, _] -> add(render(token), -1)
[_, :rightParen, _]
[_, :rightBracket, _] -> add(render(token), -1)
[:leftBrace, :rightBrace, _] -> add(render(token), -1)
[_, :rightBrace, _] -> add(' ' << render(token), -1)
// drop commas that are redundant before newlines and closers
[_, :comma, :newline]
[_, :comma, :rightParen]
[_, :comma, :rightBracket]
[_, :comma, :rightBrace] -> ?
[_, :colon, _] -> if {
lastType = :comma
lastType = :leftBrace
connectingToken?(lastType) -> add(' :', 0)
_ -> add(':', 0)
}
[_, :comma, _]
[_, :ellipsis, _] -> add(render(token), 0)
[:leftParen, _, _]
[:leftBracket, _, _]
[:dot, _, _]
[:exclam, _, _] -> add(render(token), 0)
// unary vs binary minus: spacing depends on what preceded the '-'
[:minus, _, _] -> if lastLastType {
:leftParen, :leftBracket, :leftBrace
:ifKeyword
:exclam
:minus
:newline
:comma
:colon -> add(render(token), 0)
_ -> if connectingToken?(lastLastType) {
true -> add(render(token), 0)
_ -> add(' ' << render(token), 0)
}
}
// atoms (:name) hug their leading colon
[:colon, :identifier, _]
[:colon, :ifKeyword, _]
[:colon, :fnKeyword, _]
[:colon, :withKeyword, _]
[:colon, :trueLiteral, _]
[:colon, :falseLiteral, _] -> if lastLastType {
:leftParen, :leftBracket, :leftBrace
:rightParen, :rightBracket, :rightBrace
:ifKeyword
:exclam
:minus
:newline
:comma
:colon -> add(render(token), 0)
_ -> if connectingToken?(lastLastType) {
true -> add(render(token), 0)
_ -> add(' ' << render(token), 0)
}
}
[:leftBrace, _, _]
_ -> add(' ' << render(token), 0)
}
}
// NOTE(review): passes the final token itself where indentLine expects a
// token *type* (cf. the call above, indentLine(lastType)); the mismatch
// only affects the hanging? flag after the last line, so it appears
// harmless today, but `last(tokens).type` looks intended — confirm.
indents << indentLine(last(tokens))
// normalize indentation: a dedenting line may only drop one level at a
// time relative to the lines it closes
indents |> with each() fn(n, i) if i {
0 -> ?
_ -> if n < indents.(i - 1) -> {
fn sub(j) if {
indents.(j) > n -> sub(j - 1)
_ -> j + 1
}
target := sub(i - 1)
if indents.(target) - n > 1 -> {
diff := indents.(target) - n
range(target, i) |> with each() fn(j) {
indents.(j) := indents.(j) - diff + 1
}
}
}
}
indentedLines := lines |> with map() fn(line, i) if line {
'' -> ''
_ -> tabs(indents.(i)) << line
}
indentedLines |> join('\n')
}
// public interface of the printer closure
{
print: print
}
}
// Pretty-print (format) Oak source text. A leading shebang line, which the
// tokenizer discards, is preserved verbatim ahead of the formatted output.
fn print(text) {
	formatted := Printer(tokenize(text)).print()
	if shebang?(text) {
		true -> text |> cut('\n') |> first() << '\n' << formatted
		_ -> formatted
	}
}