path: root/source/Syntax/Chunk.hs
blob: 604ccf6af09c5f949ade652f8aaa98381be36088
module Syntax.Chunk where

import Base
import Syntax.Token

import Data.List qualified as List


-- LATER This is just a naïve implementation of token chunking.
-- It and the current tokenizer should probably be replaced by
-- a more efficient implementation that does the chunking
-- directly while tokenizing.

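-- Split the token stream into chunks, one per top-level environment:
-- each chunk runs from the BeginEnv token of a top-level environment
-- (see isTopLevelEnv) up to and including its matching EndEnv.
-- All tokens outside of such environments are dropped.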
chunkify :: [Located Token] -> [[Located Token]]
chunkify [] = []
chunkify (tok : toks) = case unLocated tok of
    BeginEnv env | isTopLevelEnv env ->
        let (envToks, otherToks) = envBlock env toks
        in  (tok : envToks) : chunkify otherToks
    --
    -- Skip all tokens outside of top-level environments.
    _ -> chunkify toks

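-- Names of the environments whose contents form top-level chunks.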
isTopLevelEnv :: Text -> Bool
isTopLevelEnv env = env `elem`
    [ "abbreviation"
    , "axiom"
    , "claim"
    , "corollary"
    , "datatype"
    , "definition"
    , "inductive"
    , "lemma"
    , "proof"
    , "proposition"
    , "signature"
    , "struct"
    , "theorem"
    ]

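-- Split off one environment block: everything up to and including the
-- first EndEnv for the given environment name, paired with the remaining
-- tokens. Nested environments of the same name are not handled (in line
-- with the naïve approach noted above).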
envBlock :: Text -> [Located Token] -> ([Located Token], [Located Token])
envBlock env toks =
    let (pre, post) = List.span (\tok -> unLocated tok /= EndEnv env) toks
    in case post of
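        -- No matching EndEnv: the rest of the stream forms the block.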
        [] -> (pre, post)
        endEnv : post' -> (pre ++ [endEnv], post')