Merge pull request #5 from m1el/fix-build

Fix build: change paths, add missing error
Author: m1el (committed by GitHub)
Date:   2024-03-11 13:41:34 +02:00
Commit: a9c0cbeb8f
3 changed files with 47 additions and 77 deletions


@@ -2,17 +2,23 @@ CC = gcc
 CFLAGS_EXTRA =
 CFLAGS = -Wall -Wextra -Wpedantic -std=c17 -O3
-.PHONY: clean
-hbas: src/hbas.c
+.PHONY: clean build-dir hbas example
+hbas: build/hbas
+example: build/example.hbf
+build:
+	mkdir -p build
+build/hbas: build src/hbas.c
 	${CC} ${CFLAGS} ${CFLAGS_EXTRA} src/hbas.c -o build/hbas
-example: hbas example.S
-	./hbas < example.S > example
-	xxd example
+build/example.hbf: build build/hbas examples/example.S
+	./hbas < examples/example.S > build/example.hbf
+	xxd build/example.hbf
 clean:
-	rm -f example hbas
+	rm -rf build
 all:
 	hbas
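Note on the new build flow: the example recipe pipes examples/example.S into the assembler on stdin and redirects stdout to build/example.hbf, so hbas is expected to act as a plain stdin-to-stdout filter. A minimal sketch of that interface, in C and purely hypothetical (it is not the actual hbas entry point), looks like this:

    #include <stdio.h>

    /* Hypothetical sketch of the stdin -> stdout filter shape the
     * example target relies on; not the real hbas main(). */
    int main(void) {
        int chr;
        /* A real assembler buffers the source, tokenizes it, and emits
         * encoded bytes; this sketch only mirrors the pipe shape,
         * copying stdin (the .S source) to stdout (the .hbf output). */
        while ((chr = getchar()) != EOF) {
            putchar(chr);
        }
        return 0;
    }

With that shape, make example reduces to one shell pipeline plus xxd for inspecting the resulting build/example.hbf.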


@@ -17,6 +17,7 @@ typedef enum AsmError_e {
     ErrNeedDirectiveAfterDot,
     ErrDirectiveNotImplemented,
     ErrUnexpectedToken,
+    ErrTriedNegateNonNumber,
 } AsmError;
 char *ERRORS[] = {
     "Success",
@@ -37,4 +38,5 @@ char *ERRORS[] = {
     "Expected directive after dot",
     "Directive is not implemented",
     "Unexpected token",
+    "Negation only works on numbers",
 };
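The added pair above illustrates the invariant this file relies on: every AsmError value doubles as an index into ERRORS[], so the enum and the string table must grow together or every later message shifts by one. A small hypothetical lookup helper (not part of this commit) makes that explicit:

    /* Hypothetical helper, assuming AsmError and ERRORS[] as defined
     * above; the enum value is used directly as an index. */
    const char *asm_error_str(AsmError err) {
        size_t count = sizeof(ERRORS) / sizeof(ERRORS[0]);
        if ((size_t)err >= count) {
            return "Unknown error"; /* enum and table drifted out of sync */
        }
        return ERRORS[err];
    }

A bounds check of this kind turns a future mismatch into a visible "Unknown error" instead of an out-of-bounds read.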


@@ -1,5 +1,4 @@
-typedef enum TokenKind_e
-{
+typedef enum TokenKind_e {
     TokInvalid = '!',
     TokEOF = '$',
     TokIdent = 'A',
@@ -12,24 +11,20 @@ typedef enum TokenKind_e
     TokComment = ';',
     TokNewline = 'n',
 } TokenKind;
-typedef struct Token_s
-{
+typedef struct Token_s {
     TokenKind kind;
     size_t start;
     size_t len;
     uint64_t num;
 } Token;
-Token token_ident(char *input, size_t len, size_t pos)
-{
+Token token_ident(char *input, size_t len, size_t pos) {
     size_t start = pos;
-    while (pos < len)
-    {
+    while (pos < len) {
         char chr = input[pos];
         char chru = chr & ~0x20;
         int good = chr == '_' || (chr >= '0' && chr <= '9') || (chru >= 'A' && chru <= 'Z');
-        if (!good)
-        {
+        if (!good) {
             break;
         }
         pos += 1;
@@ -37,8 +32,7 @@ Token token_ident(char *input, size_t len, size_t pos)
     return (Token){TokIdent, start, pos - start, 0};
 }
-Token token_number(char *input, size_t len, size_t pos)
-{
+Token token_number(char *input, size_t len, size_t pos) {
     char *ptr = &input[pos];
     char next = '\0';
     size_t start = pos;
@@ -48,30 +42,21 @@ Token token_number(char *input, size_t len, size_t pos)
     uint64_t pre_overflow;
     AsmError bad_num = ErrOk;
-    if (pos + 1 < len)
-    {
+    if (pos + 1 < len) {
         next = ptr[1] & ~0x20;
     }
-    if (input[pos] == '0')
-    {
-        if (next == 'X')
-        {
+    if (input[pos] == '0') {
+        if (next == 'X') {
             base = 16;
             pos += 2;
-        }
-        else if (next == 'D')
-        {
+        } else if (next == 'D') {
             base = 10;
             pos += 2;
-        }
-        else if (next == 'O')
-        {
+        } else if (next == 'O') {
             base = 8;
             pos += 2;
-        }
-        else if (next == 'B')
-        {
+        } else if (next == 'B') {
             base = 2;
             pos += 2;
         }
@@ -79,30 +64,23 @@ Token token_number(char *input, size_t len, size_t pos)
     pre_overflow = (~(size_t)0) / base;
     // valid: "0x_0", "0_"
     // invalid: "0x_"
-    while (pos < len)
-    {
+    while (pos < len) {
         uint64_t digit;
         uint64_t next;
         char chr = input[pos];
         char chru = chr & ~0x20;
-        if (chr == '_')
-        {
+        if (chr == '_') {
             pos += 1;
             continue;
         }
         digit = (uint64_t)chr - (uint64_t)'0';
-        if (digit >= 10)
-        {
+        if (digit >= 10) {
             digit = (uint64_t)chru - (uint64_t)('A' - 10);
         }
-        if (digit >= base)
-        {
-            if (chr >= '0' && chr <= '9')
-            {
+        if (digit >= base) {
+            if (chr >= '0' && chr <= '9') {
                 bad_num = ErrBadNumDigit;
-            }
-            else if (chru >= 'A' && chru <= 'Z')
-            {
+            } else if (chru >= 'A' && chru <= 'Z') {
                 bad_num = ErrBadNumDigit;
             }
             break;
@@ -112,75 +90,59 @@ Token token_number(char *input, size_t len, size_t pos)
         digits += 1;
         next = rv * base + digit;
-        if (rv > pre_overflow || next < rv)
-        {
+        if (rv > pre_overflow || next < rv) {
             bad_num = ErrBadNumOverflow;
             break;
         }
         rv = next;
     }
-    if (digits == 0)
-    {
+    if (digits == 0) {
         bad_num = ErrBadNumNoDigit;
     }
-    if (bad_num)
-    {
+    if (bad_num) {
         return (Token){TokBadNumber, start, pos - start, bad_num};
-    }
-    else
-    {
+    } else {
         return (Token){TokNumber, start, pos - start, rv};
     }
 }
-Token token(char *input, size_t len, size_t pos)
-{
+Token token(char *input, size_t len, size_t pos) {
     char chr, chru;
     char *ptr = &input[pos];
-    while (pos < len && (input[pos] == ' ' || input[pos] == '\t'))
-    {
+    while (pos < len && (input[pos] == ' ' || input[pos] == '\t')) {
         pos += 1;
     }
-    if (pos == len)
-    {
+    if (pos == len) {
         return (Token){TokEOF, pos, 0, 0};
     }
     ptr = &input[pos];
     chr = *ptr;
-    if (chr == ',' || chr == '-' || chr == '.' || chr == ':')
-    {
+    if (chr == ',' || chr == '-' || chr == '.' || chr == ':') {
         return (Token){(TokenKind)chr, pos, 1, 0};
     }
-    if (chr == '\n')
-    {
+    if (chr == '\n') {
         return (Token){TokNewline, pos, 1, 0};
     }
-    if (chr == '\r')
-    {
-        if (pos + 1 < len && ptr[1] == '\n')
-        {
+    if (chr == '\r') {
+        if (pos + 1 < len && ptr[1] == '\n') {
             return (Token){TokNewline, pos, 2, 0};
         }
         return (Token){TokNewline, pos, 1, 0};
     }
-    if (chr == ';')
-    {
+    if (chr == ';') {
         size_t clen = 1;
-        while (pos + clen < len && ptr[clen] != '\n' && ptr[clen] != '\r')
-        {
+        while (pos + clen < len && ptr[clen] != '\n' && ptr[clen] != '\r') {
             clen += 1;
         }
         return (Token){TokComment, pos, clen, 0};
     }
-    if (chr >= '0' && chr <= '9')
-    {
+    if (chr >= '0' && chr <= '9') {
         return token_number(input, len, pos);
     }
     chru = chr & ~0x20;
-    if (chr == '_' || (chru >= 'A' && chru <= 'Z'))
-    {
+    if (chr == '_' || (chru >= 'A' && chru <= 'Z')) {
         return token_ident(input, len, pos);
     }
     return (Token){TokInvalid, pos, 1, 0};
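For orientation, the lexer above is pull-based: token() skips spaces and tabs, then returns a single Token whose start and len tell the caller where to resume; TokEOF comes back with len 0, and the chr & ~0x20 trick folds ASCII letters to uppercase for the identifier and hex-digit checks. A hypothetical driver loop, assuming the definitions above are visible in the same translation unit, would look like:

    #include <stdio.h>

    /* Hypothetical usage sketch: repeatedly pull tokens and print the
     * window each one covers. Not part of this commit. */
    void dump_tokens(char *input, size_t len) {
        size_t pos = 0;
        for (;;) {
            Token tok = token(input, len, pos);
            printf("kind=%c start=%zu len=%zu num=%llu\n",
                   (char)tok.kind, tok.start, tok.len,
                   (unsigned long long)tok.num);
            if (tok.kind == TokEOF) {
                break; /* len is 0 at EOF, so stop explicitly */
            }
            pos = tok.start + tok.len; /* resume right after the token */
        }
    }

Feeding it a line such as add r1, r2, 0xFF_0 ; trailing comment yields identifier tokens for add, r1 and r2, comma tokens, a number token (value 0xFF0, with the underscore skipped), and a comment token before TokEOF.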