;; Read a text file and print the list of alphabetic words it contains.
(import (rnrs))
;; get-word : textual-input-port -> string
;; Read consecutive alphabetic characters from FILE and return them as a string.
(define get-word
  (lambda (file)
    (let ([c (lookahead-char file)])
      (cond
        ;; Check for a character first so the eof object never reaches char-alphabetic?.
        [(and (char? c) (char-alphabetic? c))
         (get-char file)                                  ; consume the peeked character
         (string-append (string c) (get-word file))]
        [else ""]))))                                     ; eof or a non-alphabetic character ends the word
;; get-words : textual-input-port -> list of strings
;; Read the whole port, skipping whitespace and collecting alphabetic words.
(define get-words
  (lambda (file)
    (let ([c (lookahead-char file)])
      (cond
        [(eof-object? c) '()]
        [(char-whitespace? c)
         (get-char file)                                  ; consume the whitespace
         (get-words file)]
        [(char-alphabetic? c)
         (cons (get-word file) (get-words file))]
        ;; Skip any other character (digits, punctuation, ...) so the cond always matches.
        [else
         (get-char file)
         (get-words file)]))))
;; tokenize : string -> list of strings
;; Open FILENAME, collect its words, and close the port before returning them.
(define tokenize
  (lambda (filename)
    (let ([file (open-input-file filename)])
      (let ([words (get-words file)])
        (close-input-port file)
        words))))
(display (tokenize "tokens.txt"))
(newline)