lex

Package lex implements lexical analysis for the assembler.

Imports

Imports #

"fmt"
"os"
"path/filepath"
"slices"
"strconv"
"strings"
"text/scanner"
"cmd/asm/internal/flags"
"cmd/internal/objabi"
"cmd/internal/src"
"fmt"
"log"
"os"
"strings"
"text/scanner"
"cmd/internal/src"
"text/scanner"
"cmd/internal/src"
"text/scanner"
"cmd/internal/src"
"go/build/constraint"
"io"
"os"
"strings"
"text/scanner"
"unicode"
"cmd/asm/internal/flags"
"cmd/internal/objabi"
"cmd/internal/src"

Constants & Variables

ARR const #

const ARR

BuildComment const #

const BuildComment

Include const #

const Include

LSH const #

Asm defines some two-character lexemes. We make up a rune/ScanToken value for them - ugly but simple.

const LSH ScanToken = -1000 - iota

ROT const #

const ROT

RSH const #

const RSH

macroName const #

const macroName
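
These constants are declared together in the package source. A sketch of the block, with the values following the -1000 - iota scheme and comments noting what each token stands for:

const (
	// Asm defines some two-character lexemes. We make up
	// a rune/ScanToken value for them - ugly but simple.
	LSH ScanToken = -1000 - iota // << Left shift.
	RSH                          // >> Logical right shift.
	ARR                          // -> Used on ARM for shift type 3, arithmetic right shift.
	ROT                          // @> Used on ARM for shift type 4, rotate right.
	Include                      // included file started here
	BuildComment                 // //go:build or +build comment
	macroName                    // name of macro that should not be expanded
)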

panicOnError var #

panicOnError makes Error panic instead of printing to standard error and exiting; it is set only during testing.

var panicOnError bool

Type Aliases

ScanToken type #

A ScanToken represents an input item. It is a simple wrapping of rune, as returned by text/scanner.Scanner, plus a couple of extra values.

type ScanToken rune

Interfaces

TokenReader interface #

A TokenReader is like a reader, but returns lex tokens of type Token. It also can tell you what the text of the most recently returned token is, and where it was found. The underlying scanner elides all spaces except newline, so the input looks like a stream of Tokens; original spacing is lost but we don't need it.

type TokenReader interface {
	// Next returns the next token.
	Next() ScanToken
	// The following methods all refer to the most recent token returned by Next.
	// Text returns the original string representation of the token.
	Text() string
	// File reports the source file name of the token.
	File() string
	// Base reports the position base of the token.
	Base() *src.PosBase
	// SetBase sets the position base.
	SetBase(*src.PosBase)
	// Line reports the line number of the token.
	Line() int
	// Col reports the column number of the token.
	Col() int
	// Close does any teardown required.
	Close()
}
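
Tokenizer, Stack, Slice, and Input below all implement TokenReader. A minimal consumer sketch; the helper drain is our illustration, not part of the package (note the import path is internal to cmd):

import (
	"fmt"
	"text/scanner"

	"cmd/asm/internal/lex"
)

// drain prints each token with its position until EOF, then closes the reader.
func drain(r lex.TokenReader) {
	for tok := r.Next(); tok != scanner.EOF; tok = r.Next() {
		fmt.Printf("%s:%d: %s %q\n", r.File(), r.Line(), tok, r.Text())
	}
	r.Close()
}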

Structs

Input struct #

Input is the main input: a stack of readers and some macro definitions. It also handles #include processing (by pushing onto the input stack) and parses and instantiates macro definitions.

type Input struct {
	Stack
	includes        []string
	beginningOfLine bool
	ifdefStack      []bool
	macros          map[string]*Macro
	text            string // Text of last token returned by Next.
	peek            bool
	peekToken       ScanToken
	peekText        string
}

Macro struct #

A Macro represents the definition of a #defined macro.

type Macro struct {
	name   string   // The #define name.
	args   []string // Formal arguments.
	tokens []Token  // Body of macro.
}

Slice struct #

A Slice reads from a slice of Tokens.

type Slice struct {
	tokens []Token
	base   *src.PosBase
	line   int
	pos    int
}

Stack struct #

A Stack is a stack of TokenReaders. As the top TokenReader hits EOF, it resumes reading the next one down.

type Stack struct {
	tr []TokenReader
}

Token struct #

A Token is a scan token plus its string value. A macro is stored as a sequence of Tokens with spaces stripped.

type Token struct {
	ScanToken
	text string
}

Tokenizer struct #

A Tokenizer is a simple wrapping of text/scanner.Scanner, configured for our purposes and made a TokenReader. It forms the lowest level, turning text from readers into tokens.

type Tokenizer struct {
	tok  ScanToken
	s    *scanner.Scanner
	base *src.PosBase
	line int
	file *os.File // If non-nil, file descriptor to close.
}

Functions

Base method #

func (s *Slice) Base() *src.PosBase

Base method #

func (t *Tokenizer) Base() *src.PosBase

Base method #

func (s *Stack) Base() *src.PosBase

Close method #

func (s *Stack) Close()

Close method #

func (t *Tokenizer) Close()

Close method #

func (in *Input) Close()

Close method #

func (s *Slice) Close()

Col method #

func (t *Tokenizer) Col() int

Col method #

func (s *Slice) Col() int

Col method #

func (s *Stack) Col() int

Error method #

func (in *Input) Error(args ...interface{})

File method #

func (s *Stack) File() string

File method #

func (t *Tokenizer) File() string

File method #

func (s *Slice) File() string

IsRegisterShift function #

IsRegisterShift reports whether the token is one of the ARM register shift operators.

func IsRegisterShift(r ScanToken) bool
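
For example:

IsRegisterShift(LSH) // true: <<
IsRegisterShift(RSH) // true: >>
IsRegisterShift('+') // false: ordinary rune token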

Line method #

func (t *Tokenizer) Line() int

Line method #

func (s *Stack) Line() int

Line method #

func (s *Slice) Line() int

Make function #

Make returns a Token with the given rune (ScanToken) and text representation.

func Make(token ScanToken, text string) Token
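
Make also folds the center dot (·) and division slash (∕) substitutes back to '.' and '/' in the text. A small sketch, e.g. for building token lists in tests:

tok := lex.Make(scanner.Ident, "AX")
fmt.Println(tok) // Token implements Stringer; prints AX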

NewInput function #

NewInput returns an Input from the given path.

func NewInput(name string) *Input

NewLexer function #

NewLexer returns a lexer for the named file.

func NewLexer(name string) TokenReader
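
A minimal driver sketch; the file name prog.s is illustrative, and NewLexer exits the process if the file cannot be opened:

tr := lex.NewLexer("prog.s")
for tok := tr.Next(); tok != scanner.EOF; tok = tr.Next() {
	fmt.Printf("%s:%d: %s\n", tr.File(), tr.Line(), tr.Text())
}
tr.Close()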

NewSlice function #

func NewSlice(base *src.PosBase, line int, tokens []Token) *Slice

NewTokenizer function #

func NewTokenizer(name string, r io.Reader, file *os.File) *Tokenizer

Next method #

func (s *Stack) Next() ScanToken

Next method #

func (in *Input) Next() ScanToken

Next method #

func (t *Tokenizer) Next() ScanToken

Next method #

func (s *Slice) Next() ScanToken

Push method #

Push pushes a new token reader onto the input stack, reporting an error if #include nesting goes too deep.

func (in *Input) Push(r TokenReader)

Push method #

Push adds tr to the top (end) of the input stack. (Popping happens automatically.)

func (s *Stack) Push(tr TokenReader)

SetBase method #

func (s *Slice) SetBase(base *src.PosBase)

SetBase method #

func (t *Tokenizer) SetBase(base *src.PosBase)

SetBase method #

func (s *Stack) SetBase(base *src.PosBase)

String method #

func (l Token) String() string

String method #

func (t ScanToken) String() string

Text method #

func (s *Slice) Text() string

Text method #

func (t *Tokenizer) Text() string

Text method #

func (in *Input) Text() string

Text method #

func (s *Stack) Text() string

Tokenize function #

Tokenize turns a string into a list of Tokens; used to parse the -D flag and in tests.

func Tokenize(str string) []Token
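
For example:

for _, tok := range lex.Tokenize("MOVQ $255, AX") {
	fmt.Printf("%q ", tok.String())
}
// Expected output: "MOVQ" "$" "255" "," "AX"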

argsFor method #

argsFor returns a map from formal name to actual value for this argumented macro invocation. The opening parenthesis has been absorbed.

func (in *Input) argsFor(macro *Macro) map[string][]Token

collectArgument method #

collectArgument returns the actual tokens for a single argument of a macro. It also returns the token that terminated the argument, which will always be either ',' or ')'. The starting '(' has been scanned.

func (in *Input) collectArgument(macro *Macro) ([]Token, ScanToken)

define method #

#define processing.

func (in *Input) define()

defineMacro method #

defineMacro stores the macro definition in the Input.

func (in *Input) defineMacro(name string, args []string, tokens []Token)

else_ method #

#else processing.

func (in *Input) else_()

enabled method #

enabled reports whether the input is enabled by an ifdef, or is at the top level.

func (in *Input) enabled() bool

endif method #

#endif processing.

func (in *Input) endif()

expectNewline method #

expectNewline requires the next token to be a newline, reporting an error that names the directive otherwise.

func (in *Input) expectNewline(directive string)

expectText method #

expectText is like Error but adds "got XXX" where XXX is a quoted representation of the most recent token.

func (in *Input) expectText(args ...interface{})

hash method #

hash processes a # preprocessor directive. It reports whether it completes.

func (in *Input) hash() bool

ifdef method #

#ifdef and #ifndef processing.

func (in *Input) ifdef(truth bool)

include method #

#include processing.

func (in *Input) include()

invokeMacro method #

invokeMacro pushes onto the input Stack a Slice that holds the macro definition with the actual parameters substituted for the formals. Invoking a macro does not touch the PC/line history.

func (in *Input) invokeMacro(macro *Macro)
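
A sketch of #define-and-expand end to end, feeding in-memory source through an Input; the name macro.s is illustrative, and a nil *os.File is acceptable when the reader is not a file:

src := "#define N 17\nMOVQ $N, AX\n"
in := lex.NewInput("macro.s")
in.Push(lex.NewTokenizer("macro.s", strings.NewReader(src), nil))
for tok := in.Next(); tok != scanner.EOF; tok = in.Next() {
	fmt.Print(in.Text(), " ")
}
// The macro is expanded during Next: MOVQ $ 17 , AX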

isIdentRune function #

We want center dot (·) and division slash (∕) to work as identifier characters.

func isIdentRune(ch rune, i int) bool
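
As a consequence, a qualified symbol lexes as a single identifier; Tokenize (via Make) then folds the center dot back to a period:

toks := lex.Tokenize("runtime·morestack")
fmt.Println(len(toks), toks[0]) // 1 runtime.morestack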

line method #

#line processing.

func (in *Input) line()

macroDefinition method #

macroDefinition returns the list of formals and the tokens of the definition. The argument list is nil for no parens on the definition; otherwise a list of formal argument names.

func (in *Input) macroDefinition(name string) ([]string, []Token)

macroName method #

macroName returns the name for the macro being referenced.

func (in *Input) macroName() string

predefine function #

predefine installs the macros set by the -D flag on the command line.

func predefine(defines flags.MultiFlag) map[string]*Macro

undef method #

#undef processing.

func (in *Input) undef()
