3 ;;; Lexical analysis of a vaguely C-like language
5 ;;; (c) 2009 Straylight/Edgeware
8 ;;;----- Licensing notice ---------------------------------------------------
10 ;;; This file is part of the Simple Object Definition system.
12 ;;; SOD is free software; you can redistribute it and/or modify
13 ;;; it under the terms of the GNU General Public License as published by
14 ;;; the Free Software Foundation; either version 2 of the License, or
15 ;;; (at your option) any later version.
17 ;;; SOD is distributed in the hope that it will be useful,
18 ;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
19 ;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
20 ;;; GNU General Public License for more details.
22 ;;; You should have received a copy of the GNU General Public License
23 ;;; along with SOD; if not, write to the Free Software Foundation,
24 ;;; Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
28 ;;;--------------------------------------------------------------------------
29 ;;; Basic lexical analyser infrastructure.
(defclass lexer ()
  ((stream :initarg :stream
           :type stream
           :reader lexer-stream)
   ;; One-character lookahead: NIL only before NEXT-CHAR is first called or
   ;; at end-of-file.
   (char :initform nil
         :type (or character null)
         :reader lexer-char)
   ;; Stack of characters pushed back by PUSHBACK-CHAR.
   (pushback-chars :initform nil
                   :type list)
   ;; One-token lookahead, maintained by NEXT-TOKEN/SCAN-TOKEN.
   (token-type :initform nil
               :accessor token-type)
   (token-value :initform nil
                :accessor token-value)
   ;; Stack of (TYPE . VALUE) pairs pushed back by PUSHBACK-TOKEN.
   (pushback-tokens :initform nil
                    :type list))
  (:documentation
   "Base class for lexical analysers.

   The lexer reads characters from STREAM, which, for best results, wants to
   be a POSITION-AWARE-INPUT-STREAM.

   The lexer provides one-character lookahead by default: the current
   lookahead character is available to subclasses in the slot CHAR. Before
   beginning lexical analysis, the lookahead character needs to be
   established with NEXT-CHAR. If one-character lookahead is insufficient,
   the analyser can push back an arbitrary number of characters using
   PUSHBACK-CHAR.

   The NEXT-TOKEN function scans and returns the next token from the STREAM,
   and makes it available as TOKEN-TYPE and TOKEN-VALUE, providing one-token
   lookahead. A parser using the lexical analyser can push back tokens using
   PUSHBACK-TOKEN.

   For convenience, the lexer implements a FILE-LOCATION method (delegated to
   the underlying stream)."))
(defgeneric scan-token (lexer)
  (:documentation
   "Internal function for scanning tokens from an input stream.

   Implementing a method on this function is the main responsibility of LEXER
   subclasses; it is called by the user-facing NEXT-TOKEN function.

   The method should consume characters (using NEXT-CHAR) as necessary, and
   return two values: a token type and token value. These will be stored in
   the corresponding slots in the lexer object in order to provide the user
   with one-token lookahead."))
(defgeneric next-token (lexer)
  (:documentation
   "Scan a token from an input stream.

   This function scans a token from an input stream. Two values are
   returned: a `token type' and a `token value'. These are opaque to the
   LEXER base class, but the intent is that the token type be significant to
   determining the syntax of the input, while the token value carries any
   additional information about the token's semantic content. The token type
   and token value are also made available for lookahead via accessors
   TOKEN-TYPE and TOKEN-VALUE on the LEXER object.

   If tokens have been pushed back (see PUSHBACK-TOKEN) then they are
   returned one by one instead of scanning the stream.")
  (:method ((lexer lexer))
    (with-slots (pushback-tokens token-type token-value) lexer
      ;; Prefer pushed-back tokens; otherwise scan a fresh token from the
      ;; stream. Either way, stash the result for one-token lookahead; the
      ;; SETF form returns the two values to the caller.
      (setf (values token-type token-value)
            (if pushback-tokens
                (let ((pushback (pop pushback-tokens)))
                  (values (car pushback) (cdr pushback)))
                (scan-token lexer))))))
(defgeneric pushback-token (lexer token-type &optional token-value)
  (:documentation
   "Push a token back into the lexer.

   Make the given TOKEN-TYPE and TOKEN-VALUE be the current lookahead token.
   The previous lookahead token is pushed down, and will be made available
   again once this new token is consumed by NEXT-TOKEN. The FILE-LOCATION is
   not affected by pushing tokens back. The TOKEN-TYPE and TOKEN-VALUE may
   be anything at all: for instance, they need not be values which can
   actually be returned by NEXT-TOKEN.")
  (:method ((lexer lexer) new-token-type &optional new-token-value)
    (with-slots (pushback-tokens token-type token-value) lexer
      ;; Save the current lookahead token and install the new one.
      (push (cons token-type token-value) pushback-tokens)
      (setf token-type new-token-type
            token-value new-token-value))))
(defgeneric next-char (lexer)
  (:documentation
   "Fetch the next character from the LEXER's input stream.

   Read a character from the input stream, and store it in the LEXER's CHAR
   slot. The character stored is returned. If characters have been pushed
   back then pushed-back characters are used instead of the input stream.

   (This function is primarily intended for the use of lexer subclasses.)")
  (:method ((lexer lexer))
    (with-slots (stream char pushback-chars) lexer
      ;; Drain pushed-back characters before touching the stream; READ-CHAR
      ;; returns NIL (not an error) at end-of-file.
      (setf char (if pushback-chars
                     (pop pushback-chars)
                     (read-char stream nil))))))
(defgeneric pushback-char (lexer char)
  (:documentation
   "Push the CHAR back into the lexer.

   Make CHAR be the current lookahead character (stored in the LEXER's CHAR
   slot). The previous lookahead character is pushed down, and will be made
   available again once this character is consumed by NEXT-CHAR.

   (This function is primarily intended for the use of lexer subclasses.)")
  (:method ((lexer lexer) new-char)
    (with-slots (char pushback-chars) lexer
      ;; Save the current lookahead and install the new character.
      (push char pushback-chars)
      (setf char new-char))))
(defgeneric fixup-stream* (lexer thunk)
  (:documentation
   "Helper function for WITH-LEXER-STREAM.

   This function does the main work for WITH-LEXER-STREAM. The THUNK is
   invoked on a single argument, the LEXER's underlying STREAM.")
  (:method ((lexer lexer) thunk)
    (with-slots (stream char pushback-chars) lexer
      ;; Pushed-back characters can't be returned to the underlying stream,
      ;; so refuse to proceed if there are any.
      (when pushback-chars
        (error "Lexer has pushed-back characters."))
      ;; Return the lookahead character to the stream, run the thunk, and
      ;; re-establish lookahead afterwards -- even on a non-local exit.
      (unread-char char stream)
      (unwind-protect
           (funcall thunk stream)
        (setf char (read-char stream nil))))))
(defmacro with-lexer-stream ((streamvar lexer) &body body)
  "Evaluate BODY with STREAMVAR bound to the LEXER's input stream.

   The STREAM is fixed up so that the next character read (e.g., using
   READ-CHAR) will be the lexer's current lookahead character. Once the BODY
   completes, the next character in the stream is read and set as the
   lookahead character. It is an error if the lexer has pushed-back
   characters (since these can't be pushed back into the input stream
   properly)."
  ;; All the real work is done by the FIXUP-STREAM* generic function.
  `(fixup-stream* ,lexer
                  (lambda (,streamvar)
                    ,@body)))
(defmethod file-location ((lexer lexer))
  ;; Delegate location reporting to the lexer's underlying input stream.
  (file-location (slot-value lexer 'stream)))
(defgeneric skip-spaces (lexer)
  (:documentation
   "Skip over whitespace characters in the LEXER.")
  (:method ((lexer lexer))
    ;; Consume characters until the lookahead is not whitespace; the loop
    ;; leaves that character as the current lookahead.
    (do ((ch (lexer-char lexer) (next-char lexer)))
        ((not (whitespace-char-p ch))))))
195 ;;;--------------------------------------------------------------------------
(defun require-token
    (lexer wanted-token-type &key (errorp t) (consumep t) default)
  "Require a token of type WANTED-TOKEN-TYPE from the LEXER.

   If the current lookahead token has the wanted type, return its value,
   consuming the token if CONSUMEP is true. Otherwise report a (continuable)
   error if ERRORP is true, and return DEFAULT."
  (with-slots (token-type token-value) lexer
    (cond ((eql token-type wanted-token-type)
           (prog1 token-value
             (when consumep (next-token lexer))))
          (errorp
           (cerror* "Expected ~A but found ~A"
                    (format-token wanted-token-type)
                    (format-token token-type token-value))
           default)
          (t default))))
212 ;;;--------------------------------------------------------------------------
(defun make-keyword-table (&rest keywords)
  "Construct a keyword table for the lexical analyser.

   The KEYWORDS arguments are individual keywords, either as strings or as
   (WORD . VALUE) pairs. A string argument is equivalent to a pair listing
   the string itself as WORD and the corresponding keyword symbol (forced to
   uppercase) as the VALUE."
  (let ((table (make-hash-table :test #'equal)))
    (dolist (item keywords)
      (multiple-value-bind (word keyword)
          (if (consp item)
              (values (car item) (cdr item))
              (values item (intern (string-upcase item) :keyword)))
        (setf (gethash word table) keyword)))
    ;; Return the completed table (without this, the function uselessly
    ;; returns NIL).
    table))
(defparameter *sod-keywords*
  (make-keyword-table

   ;; Words with important meanings to us.
   "class"
   "import" "load" "lisp" "typename"

   ;; Words with a meaning to C's type system.
   "char" "int" "float" "void"
   "long" "short" "signed" "unsigned" "double"
   "const" "volatile" "restrict"
   "struct" "union" "enum")
  "Default keyword table for the SOD lexer.")
(defclass sod-lexer (lexer)
  ((keywords :initarg :keywords
             :initform *sod-keywords*
             :type hash-table
             :reader lexer-keywords))
  (:documentation
   "Lexical analyser for the SOD language.

   See the LEXER class for the gory details about the lexer protocol."))
(defun format-token (token-type &optional token-value)
  "Return a human-readable description of a token, for use in messages.

   TOKEN-TYPE and TOKEN-VALUE are as returned by NEXT-TOKEN. As a
   convenience, a LEXER instance may be passed as TOKEN-TYPE, in which case
   its current lookahead token is described instead."
  (when (typep token-type 'lexer)
    (let ((lexer token-type))
      (setf token-type (token-type lexer)
            token-value (token-value lexer))))
  (etypecase token-type
    ((eql :eof) "<end-of-file>")
    ((eql :string) "<string-literal>")
    ((eql :char) "<character-literal>")
    ((eql :id) (format nil "<identifier~@[ `~A'~]>" token-value))
    (keyword (format nil "`~(~A~)'" token-type))
    ;; Graphic, non-space characters are quoted directly; anything else is
    ;; spelled out by name via ~:C.
    (character (format nil "~:[<~:C>~;`~C'~]"
                       (and (graphic-char-p token-type)
                            (char/= token-type #\space))
                       token-type))))
272 (defmethod scan-token ((lexer sod-lexer))
273 (with-slots (stream char keywords) lexer
279 ;; End-of-file brings its own peculiar joy.
280 ((null ch) (return (values :eof t)))
282 ;; Ignore whitespace and continue around for more.
283 ((whitespace-char-p ch) (go scan))
286 ((or (char= ch #\") (char= ch #\'))
287 (with-default-error-location (file-location lexer)
290 (with-output-to-string (out)
293 (setf ch (next-char lexer))
296 "Unexpected end of file in string/character constant")
299 (cond ((char= ch quote) (return))
300 ((char= ch #\\) (getch)))
301 (write-char ch out))))))
302 (setf ch (next-char lexer))
304 (#\" (return (values :string string)))
305 (#\' (case (length string)
306 (0 (cerror* "Empty character constant")
307 (return (values :char #\?)))
308 (1 (return (values :char (char string 0))))
310 "Multiple characters in character constant")
311 (return (values :char (char string 0))))))))))
313 ;; Pick out identifiers and keywords.
314 ((or (alpha-char-p ch) (char= ch #\_))
316 ;; Scan a sequence of alphanumerics and underscores. We could
317 ;; allow more interesting identifiers, but it would damage our C
318 ;; lexical compatibility.
319 (let ((id (with-output-to-string (out)
322 (setf ch (next-char lexer))
324 (not (or (alphanumericp ch)
328 ;; Check to see whether we match any keywords.
329 (multiple-value-bind (keyword foundp) (gethash id keywords)
330 (return (values (if foundp keyword :id) id)))))
332 ;; Pick out numbers. Currently only integers, but we support
336 ;; Sort out the prefix. If we're looking at `0b', `0o' or `0x'
337 ;; (maybe uppercase) then we've got a funny radix to deal with.
338 ;; Otherwise, a leading zero signifies octal (daft, I know), else
339 ;; we're left with decimal.
340 (multiple-value-bind (radix skip-char)
343 (case (and (setf ch (next-char lexer))
350 ;; If we last munched an interesting letter, we need to skip over
351 ;; it. That's what the SKIP-CHAR flag is for.
353 (setf ch (next-char lexer)))
355 ;; Scan an integer. While there are digits, feed them into the
357 (do ((accum 0 (+ (* accum radix) digit))
358 (digit (and ch (digit-char-p ch radix))
359 (and ch (digit-char-p ch radix))))
360 ((null digit) (return-from scan-token
361 (values :integer accum)))
362 (setf ch (next-char lexer)))))
364 ;; A slash might be the start of a comment.
366 (setf ch (next-char lexer))
369 ;; Comment up to the end of the line.
372 (setf ch (next-char lexer))
373 (when (or (null ch) (char= ch #\newline))
376 ;; Comment up to the next `*/'.
380 (case (setf ch (next-char lexer))
385 (case (setf ch (next-char lexer))
387 (#\/ (setf ch (next-char lexer))
394 ;; False alarm. (The next character is already set up.)
396 (return (values #\/ t)))))
398 ;; A dot: might be `...'. Tread carefully! We need more lookahead
399 ;; than is good for us.
401 (setf ch (next-char lexer))
403 (setf ch (next-char lexer))
404 (cond ((eql ch #\.) (return (values :ellpisis nil)))
405 (t (pushback-char lexer #\.)
406 (return (values #\. t)))))
408 (return (values #\. t)))))
410 ;; Anything else is a lone delimiter.
412 (return (multiple-value-prog1
414 (next-char lexer)))))
417 ;; Scan a new character and try again.
418 (setf ch (next-char lexer))
421 ;;;--------------------------------------------------------------------------
(defclass c-fragment ()
  ((location :initarg :location
             :type file-location
             :accessor c-fragment-location)
   (text :initarg :text
         :type string
         :accessor c-fragment-text))
  (:documentation
   "Represents a fragment of C code to be written to an output file.

   A C fragment is aware of its original location, and will bear proper #line
   markers when written out."))
(defgeneric write-fragment (fragment stream)
  (:documentation
   "Writes a fragment to the output stream, marking its source properly.")
  (:method ((fragment c-fragment) stream)
    (with-slots (location text) fragment
      ;; Announce the fragment's original location, then emit its text.
      (format stream "~&#line ~D ~S~%~A~&"
              (file-location-line location)
              (namestring (file-location-pathname location))
              text))
    ;; Switch the line numbering back to the output file afterwards, when
    ;; the stream can tell us where we are. NOTE(review): the guard class
    ;; name is reconstructed -- confirm against the streams module.
    (when (typep stream 'position-aware-stream)
      (format stream "#line ~D ~S~%"
              (1+ (position-aware-stream-line stream))
              (namestring (stream-pathname stream))))))
451 (defun scan-c-fragment (lexer end-chars)
452 "Snarfs a sequence of C tokens with balanced brackets.
454 Reads and consumes characters from the LEXER's stream, and returns them as
455 a string. The string will contain whole C tokens, up as far as an
456 occurrence of one of the END-CHARS (a list) which (a) is not within a
457 string or character literal or comment, and (b) appears at the outer level
458 of nesting of brackets (whether round, curly or square -- again counting
459 only brackets which aren't themselves within string/character literals or
460 comments. The final END-CHAR is not consumed.
462 An error is signalled if either the stream ends before an occurrence of
463 one of the END-CHARS, or if mismatching brackets are encountered. No
464 other attempt is made to ensure that the characters read are in fact a
467 Both original /*...*/ and new //... comments are recognized. Trigraphs
468 and digraphs are currently not recognized."
470 (let ((output (make-string-output-stream))
471 (ch (lexer-char lexer))
472 (start-floc (file-location lexer))
476 ;; Main loop. At the top of this loop, we've already read a
477 ;; character into CH. This is usually read at the end of processing
478 ;; the individual character, though sometimes (following `/', for
479 ;; example) it's read speculatively because we need one-character
483 "Read the next character into CH; complain if we hit EOF."
484 (unless (setf ch (next-char lexer))
485 (cerror*-with-location start-floc
486 "Unexpected end-of-file in C fragment")
490 "Write the character to the output buffer."
491 (write-char ch output))
493 "Push a closing delimiter onto the stack."
498 ;; Hack: if the first character is a newline, discard it. Otherwise
499 ;; (a) the output fragment will look funny, and (b) the location
500 ;; information will be wrong.
501 (when (eql ch #\newline)
504 ;; And fetch characters.
507 ;; Here we're outside any string or character literal, though we
508 ;; may be nested within brackets. So, if there's no delimiter, and
509 ;; we've found the end character, we're done.
510 (when (and (null delim) (member ch end-chars))
513 ;; Otherwise take a copy of the character, and work out what to do
518 ;; Starting a literal. Continue until we find a matching
519 ;; character not preceded by a `\'.
532 ;; Various kinds of opening bracket. Stash the current
533 ;; delimiter, and note that we're looking for a new one.
534 (#\( (push-delim #\)))
535 (#\[ (push-delim #\]))
536 (#\{ (push-delim #\}))
538 ;; Various kinds of closing bracket. If it matches the current
539 ;; delimeter then unstack the next one along. Otherwise
540 ;; something's gone wrong: C syntax doesn't allow unmatched
544 (setf delim (pop stack))
545 (cerror* "Unmatched `~C'." ch))
548 ;; A slash. Maybe a comment next. But maybe not...
551 ;; Examine the next character to find out how to proceed.
555 ;; A second slash -- eat until the end of the line.
561 (when (eql ch #\newline)
565 ;; A star -- eat until we find a star-slash. Since the star
566 ;; might be preceded by another star, we use a little state
573 ;; Main state. If we read a star, switch to star state;
574 ;; otherwise eat the character and try again.
582 ;; Star state. If we read a slash, we're done; if we read
583 ;; another star, stay in star state; otherwise go back to
595 ;; Something else. Eat it and continue.
598 ;; Return the fragment we've collected.
599 (make-instance 'c-fragment
601 :text (get-output-stream-string output)))))
(defun c-fragment-reader (stream char arg)
  "Reader for C-fragment syntax #{ ... stuff ... }."
  (declare (ignore char arg))
  ;; Wrap a SOD lexer around the Lisp reader's stream, prime the lookahead
  ;; character, and snarf balanced C text up to (but not including) the
  ;; closing brace.
  (let ((lexer (make-instance 'sod-lexer :stream stream)))
    (next-char lexer)
    (scan-c-fragment lexer '(#\}))))
611 ;;;--------------------------------------------------------------------------
615 (with-input-from-string (in "
617 123 0432 0b010123 0xc0ffee __burp_32 class
620 class integer : integral_domain {
625 (let* ((stream (make-instance 'position-aware-input-stream
628 (lexer (make-instance 'sod-lexer
630 :keywords *sod-keywords*))
634 (multiple-value-bind (tokty tokval) (next-token lexer)
635 (push (list tokty tokval) list)
636 (when (eql tokty :eof)
640 ;;;----- That's all, folks --------------------------------------------------