Compare commits

4 Commits: 7356a5000a ... master
| Author | SHA1 | Date |
|---|---|---|
| | 1d246f3489 | |
| | 9c2e0cc2fc | |
| | 7f3798a9c6 | |
| | d850b0dadf | |

README.md (11 lines changed)
@@ -1,11 +1,8 @@
-# Fudge-chez
-This is the Fudge programming language implemented in Chez scheme.
+# scheme-fudge
+This is the Fudge programming language implemented in r7rs-scheme.
 For the original project, go to Wannie's
 [repository](https://shit-co.de/wanniepannie/fudge2.1).
 
 # Installation
-To run fudge-chez, install [Chez](https://scheme.com) and run the
-program with ```chez --program fudge-chez.ss```
-
-## Why Chez scheme?
-Because Scheme is elegant. (In theory.)
+To run fudge-chez, install a r7rs-compliant scheme implementation
+and execute the script fudge.ss.
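The updated README no longer pins a specific Scheme implementation. As a hedged sketch (the implementation and the input filename are assumptions, not part of this change), the new entry point below could be run with something like `chibi-scheme -I . fudge.scm program.fudge`, where `-I .` puts the working directory on the library search path so the `(lexer)` import resolves to `lexer.sld`.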
							
								
								
									
fudge-chez.ss (101 lines, file deleted)
@@ -1,101 +0,0 @@
(import (rnrs))

(define get-word
  (lambda (file)
    (let ([c (lookahead-char file)])
      (cond
        [(eof-object? c) ""]
        [(char-alphabetic? c)
         (get-char file)
         (string-append (string c) (get-word file))]
        [else ""]))))

(define char->digit
  (lambda (c)
    (- (char->integer c) (char->integer #\0))))

; get-number supports only nonnegative integers.
(define get-number
  (lambda (file)
    (let f ([total 0])
      (let ([c (lookahead-char file)])
        (if (char-numeric? c)
          (begin
            (get-char file)
            (f (+ (* total 10) (char->digit c))))
          total)))))

(define get-token
  (lambda (file)
    (let ([c (lookahead-char file)])
      (cond
        [(eof-object? c) 'end-of-file]
        [(char-whitespace? c)
         (get-char file)
         (get-token file)]
        [(char-alphabetic? c)
         (get-word file)]
        [(char-numeric? c)
         (get-number file)]
        [(eq? c #\+) (get-char file) '+]
        [(eq? c #\-) (get-char file) '-]
        [(eq? c #\*) (get-char file) '*]
        [(eq? c #\/) (get-char file) '/]
        [(eq? c #\=) (get-char file) #\=]
        [else (error 'get-token "Bad character" c)]))))

(define get-tokens
  (lambda (file)
    (let ([token (get-token file)])
      (if (eq? token 'end-of-file)
          '()
          (cons token (get-tokens file))))))

(define tokenize
  (lambda (filename)
    (let ([file (open-input-file filename)])
      (get-tokens file))))

(define test-lexer-fail
  (lambda (input want got)
    (display "text-lexer-fail: input=")
    (write input)
    (display "; want: ")
    (write want)
    (display "; got: ")
    (write got)
    (display ".")
    (newline)))

(define test-lexer-success
  (lambda (input got)
    (display "text-lexer-success: input=")
    (write input)
    (display "; got: ")
    (write got)
    (display ".")
    (newline)))

(define test-lexer
  (lambda (input want)
    (let ([port (open-string-input-port input)])
      (call/cc
        (lambda (i-got-exception)
          (with-exception-handler
            (lambda (x)
              (display "test-lexer-fail: input ")
              (write input)
              (display " throws ")
              (write x)
              (display ".")
              (newline)
              (i-got-exception x))
            (lambda ()
              (let ([got (get-tokens port)])
                (if (equal? got want)
                (test-lexer-success input got)
                (test-lexer-fail input want got))))))))))

(test-lexer "hello world" '("hello" "world"))
(test-lexer "+ * / -" '(+ * / -))
(test-lexer "identifier = 14" '("identifier" = 14))
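For context on what is being replaced: the removed harness runs each case under `with-exception-handler` and escapes through the continuation captured by `call/cc`, so a "Bad character" error in one input is reported without aborting the remaining cases. The new code below drops ports and R6RS exceptions in favour of a plain string-plus-index `lexer` record and an explicit `lexer-error` record type.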
							
								
								
									
fudge.scm (21 lines, new file)
@@ -0,0 +1,21 @@
(import (scheme base)
        (scheme write)
        (scheme file)
        (scheme process-context)
        (lexer))

(define usage
  (lambda ()
    (display "usage: fudge.scm <filename>")
    (newline)))

(unless (= (length (command-line)) 2)
  (usage)
  (exit))

(define input-string
  (read-string
    100000000000000
    (open-input-file (car (cdr (command-line))))))

(display (tokenize input-string))
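fudge.scm slurps the entire input file by passing a very large character count to `read-string`. That works in practice, but as a hedged alternative sketch (not part of this change; the helper name and chunk size are arbitrary) the file can be read to end-of-file without guessing an upper bound, using only R7RS-small procedures:

```scheme
(import (scheme base) (scheme file))

;; Read an entire file into a string by accumulating fixed-size chunks
;; until read-string reports end-of-file.
(define (read-whole-file filename)
  (call-with-port (open-input-file filename)
    (lambda (port)
      (let loop ((chunks '()))
        (let ((chunk (read-string 4096 port)))
          (if (eof-object? chunk)
              (apply string-append (reverse chunks))
              (loop (cons chunk chunks))))))))
```

The resulting string could then be fed to `tokenize` exactly as `input-string` is above.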
							
								
								
									
lexer-test.scm (19 lines, new file)
@@ -0,0 +1,19 @@
(import (scheme base)
        (scheme write)
        (lexer))

(define test
  (lambda (input want)
    (let ((got (tokenize input)))
      (unless (equal? got want)
        (display "test fail: input = ")
        (display input)
        (display "; want = ")
        (display want)
        (display "; got = ")
        (display got)
        (newline)))))

(test "Struct Enum" '(Struct Enum))
(test "this is identifier" '("this" "is" "identifier"))
(test "+ - = != :=" '(+ - = != :=))
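The `test` helper prints a line only for failing cases, so a fully passing run is silent. As with the entry point, running this file requires the `(lexer)` library to be on the implementation's search path (hypothetically, `chibi-scheme -I . lexer-test.scm`); the cases involving operators and symbol tokens are likely to keep reporting failures until the lexer handles more than alphabetic words.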
							
								
								
									
lexer.sld (72 lines, new file)
@@ -0,0 +1,72 @@
(define-library (lexer)
  (export tokenize)
  (import (scheme base)
          (scheme write)
          (scheme char))
  (begin
    ;; A lexer is an input string plus the index of the next unread character.
    (define-record-type lexer
      (make-lexer-with-index input index)
      lexer?
      (input lexer-input lexer-input-set!)
      (index lexer-index lexer-index-set!))

    ;; Returned instead of a token list when the input cannot be lexed.
    (define-record-type lexer-error
      (make-lexer-error type message)
      lexer-error?
      (type lexer-error-type)
      (message lexer-error-message))

    (define make-lexer
      (lambda (input)
        (make-lexer-with-index input 0)))

    (define lexer-end?
      (lambda (lexer)
        (>= (lexer-index lexer)
            (string-length (lexer-input lexer)))))

    (define lexer-peek
      (lambda (lexer)
        (if (lexer-end? lexer)
          (eof-object)
          (string-ref (lexer-input lexer)
                      (lexer-index lexer)))))

    (define lexer-eat!
      (lambda (lexer)
        (if (lexer-end? lexer)
          (eof-object)
          (let ((index (lexer-index lexer)))
            (lexer-index-set! lexer (+ index 1))
            (string-ref (lexer-input lexer) index)))))

    ;; Read one alphabetic word starting at the current index.
    (define get-token
      (lambda (lexer)
        (let ((c (lexer-peek lexer)))
          (cond
            ((eof-object? c) "")
            ((char-alphabetic? c)
             (lexer-eat! lexer)
             (string-append (string c) (get-token lexer)))
            (else "")))))

    ;; Collect word tokens until end of input, skipping whitespace.
    ;; Tokens are accumulated in reverse and re-reversed at the end;
    ;; any other character yields a lexer-error.
    (define get-tokens
      (lambda (lexer)
        (let loop ((tokens '()))
          (let ((c (lexer-peek lexer)))
            (cond
              ((eof-object? c) (reverse tokens))
              ((char-whitespace? c)
               (lexer-eat! lexer)
               (loop tokens))
              ((char-alphabetic? c)
               (let ((token (get-token lexer)))
                 (if (lexer-error? token)
                     token
                     (loop (cons token tokens)))))
              (else
               (make-lexer-error 'bad-character (string c))))))))

    (define tokenize
      (lambda (input)
        (get-tokens (make-lexer input))))))
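A hedged usage sketch of the exported entry point (not part of the commit; it assumes the completed `get-tokens` loop above and mirrors an input from lexer-test.scm):

```scheme
(import (scheme base) (scheme write) (lexer))

;; Only alphabetic words and whitespace are handled so far, so word-only
;; input tokenizes into a list of word strings.
(display (tokenize "this is identifier"))   ; prints (this is identifier)
(newline)
```

Note that only `tokenize` is exported, so callers cannot currently use `lexer-error?` to distinguish an error result from a token list.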