/* -----------------------------------------------------------------------------
Copyright 2023-2024 Kevin P. Barry

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
----------------------------------------------------------------------------- */

// Author: Kevin P. Barry [ta0kira@gmail.com]

define TextStream {
  $ReadOnly[source]$

  @value String source
  // tokenStart..tokenEnd delimits the token currently being scanned.
  @value Int tokenStart
  @value Int tokenEnd
  // Line/char position of tokenStart within source, for error reporting.
  @value Int currentLine
  @value Int currentChar

  new (source) {
    return TextStream{ source, 0, 0, 1, 1 }
  }

  current () (char) {
    char <- empty
    if (tokenEnd < source.size()) {
      char <- source.readAt(tokenEnd)
    }
  }

  tokenSize () {
    return tokenEnd-tokenStart
  }

  forward () {
    if (tokenEnd < source.size()) {
      tokenEnd <- tokenEnd+1
    }
    return self
  }

  reset () {
    tokenEnd <- tokenStart
    return self
  }

  take () {
    cleanup {
      // Advance the line/char counters past the consumed token.
      while (tokenStart < tokenEnd) {
        if (source.readAt(tokenStart) == '\n') {
          currentChar <- 1
          currentLine <- currentLine+1
        } else {
          currentChar <- currentChar+1
        }
      } update {
        tokenStart <- tokenStart+1
      }
      tokenStart <- tokenEnd
    } in return preview()
  }

  preview () {
    return source.subSequence(tokenStart, tokenSize())
  }

  currentLine () {
    return currentLine
  }

  currentChar () {
    return currentChar
  }

  atEnd () {
    return tokenStart >= source.size()
  }
}

define UnformattedFormatter {
  new () {
    return delegate -> #self
  }

  formatLeaf (_, value) {
    return value.formatted()
  }

  createSection (_) {
    return String.builder()
  }
}

define StreamTokenizer {
  $ReadOnlyExcept[]$

  @value #c context
  @value Tokenizer<#c, #v> tokenizer

  new (context, tokenizer) {
    return delegate -> StreamTokenizer<#c, #v>
  }

  tokenizeAll (input, output) {
    scoped {
      optional #v token <- empty
    } in while (!input.atEnd()) {
      token <- input `tokenizer.tokenize` context
    } update {
      // Process new token.
      if (`present` token) {
        \ output.append(`require` token)
      } else {
        // Tokenizing failed => let the caller deal with the rest of the input.
        break
      }
    }
    return self
  }
}

define CharType {
  $ReadOnlyExcept[]$

  @category KVReader<Char, String> escapes <- HashedMap<Char, String>.new()
      .set('\t', "\\t")
      .set('\n', "\\n")
      .set('\r', "\\r")
      .set('\"', "\\\"")
      .set('\\', "\\\\")

  @category SetReader<Char> binChars <- HashedSet<Char>.new() `AppendH:from` "01".defaultOrder()

  @category SetReader<Char> octChars <- HashedSet<Char>.new() `AppendH:from` "01234567".defaultOrder()

  @category SetReader<Char> decChars <- HashedSet<Char>.new() `AppendH:from` "0123456789".defaultOrder()

  @category SetReader<Char> hexChars <- HashedSet<Char>.new() `AppendH:from` "0123456789abcdefABCDEF".defaultOrder()

  @category SetReader<Char> whitespaceChars <- HashedSet<Char>.new() `AppendH:from` "\n\t\r ".defaultOrder()

  lower (c) {
    return c >= 'a' && c <= 'z'
  }

  upper (c) {
    return c >= 'A' && c <= 'Z'
  }

  digit (c) {
    return `decChars.member` c
  }

  binDigit (c) {
    return `binChars.member` c
  }

  octDigit (c) {
    return `octChars.member` c
  }

  hexDigit (c) {
    return `hexChars.member` c
  }

  alphaNum (c) {
    return lower(c) || upper(c) || digit(c)
  }

  whitespace (c) {
    return `whitespaceChars.member` c
  }

  binChars () {
    return binChars
  }

  octChars () {
    return octChars
  }

  decChars () {
    return decChars
  }

  hexChars () {
    return hexChars
  }

  // Returns true if c is one of the characters in allowed.
  oneOf (c, allowed) (match) {
    match <- false
    traverse (allowed.defaultOrder() -> Char c2) {
      if (c == c2) {
        return true
      }
    }
  }

  // Replaces line breaks and other special characters with escape sequences.
  escapeBreaks (string) {
    [Append<Formatted> & Build<String>] builder <- String.builder()
    traverse (string.defaultOrder() -> Char c) {
      scoped {
        optional String replacement <- escapes.get(c)
      } in if (`present` replacement) {
        \ builder.append(`require` replacement)
      } else {
        \ builder.append(c)
      }
    }
    return builder.build()
  }
}
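
// Usage sketch (illustration only): SomeContext, SomeToken, and SomeTokenizer
// below are hypothetical stand-ins for a concrete Tokenizer<#c, #v>
// implementation defined elsewhere, and output can be any Append<SomeToken>.
// Assuming such definitions exist, tokenizing an entire string might look like:
//
//   TextStream input <- TextStream.new("line one\nline two\n")
//   Append<SomeToken> output <- Vector<SomeToken>.new()
//   \ StreamTokenizer<SomeContext, SomeToken>.new(SomeContext.new(), SomeTokenizer.new())
//       .tokenizeAll(input, output)
//
// If tokenizeAll stops before input.atEnd(), input.currentLine() and
// input.currentChar() give the position of the unconsumed input, i.e., where
// tokenizing failed.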