Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Fix build #464

Draft
wants to merge 3 commits into
base: main
Choose a base branch
from
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 5 additions & 9 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -61,19 +61,15 @@ set(BUILTINS_HEADERS

# Set lexer source and headers
set(LEXER_SOURCE
src/lexer/tokenizer/token.c
src/parser/lexer/token.c

src/lexer/tokenizer/tokenizer.c

src/lexer/lexer.c
src/parser/lexer/lexer.c
)

set(LEXER_HEADERS
src/lexer/tokenizer/token.h

src/lexer/tokenizer/tokenizer.h
src/parser/lexer/token.h

src/lexer/lexer.h
src/parser/lexer/lexer.h
)

# Set AST source and headers
Expand All @@ -100,7 +96,7 @@ set(PARSER_HEADERS

# Set Main executables for lexer, ast, and parser
set(LEXER_MAIN
src/lexer/main.c
src/parser/lexer/main.c
)

set(AST_MAIN
Expand Down
5 changes: 2 additions & 3 deletions Dockerfile
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ FROM alpine:latest
LABEL name="The One Programming Language"

# LLVM version
ARG LLVM_VERSION=12.0.1
ARG LLVM_VERSION=16.0.3

# LLVM dependencies
RUN apk --no-cache add \
Expand Down Expand Up @@ -58,8 +58,7 @@ RUN cmake --build ./build --config Debug --target all -j 6 --
WORKDIR /One/build

# Running example input.one
RUN ./lexer ../src/input.one log
RUN cat log
RUN ./lexer ../src/input.one log && cat log

# Running tests
RUN ./lexer_test
Expand Down
2 changes: 1 addition & 1 deletion src/ast/ast.h
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
#include <inttypes.h>

#include "../builtins/array.h"
#include "../lexer/tokenizer/token.h"
#include "../parser/lexer/token.h"

typedef enum _token_type TokenType;
typedef struct _location Location;
Expand Down
2 changes: 1 addition & 1 deletion src/builtins/error.c
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

**/

#include <parser/lexer/lexer.h>
#include "../parser/lexer/lexer.h"

#include "error.h"

Expand Down
48 changes: 47 additions & 1 deletion src/parser/lexer/lexer.c
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,7 @@
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <builtins/error.h>
#include "../../builtins/error.h"

#ifdef _ONE_TEST_
#define debug_lexer(format, args...) \
Expand Down Expand Up @@ -617,3 +617,49 @@ void lexer_free()
// free(lexer.start);
// free(lexer.current);
}

/*
* @function: lexer_trace
* @description: Log and trace items of tokens
* @arguments: FILE* file_out, char* data, Token** tokens
* @return: nothing, void
*/
/*
 * @function: lexer_trace
 * @description: Log and trace items of tokens. Walks the NULL-terminated
 *               token list, resolves each token's line/column from its raw
 *               index, and writes one formatted line per token to file_out.
 *               Stops at TOKEN_EOF.
 * @arguments: FILE* file_out, char* data, Token** tokens
 * @return: nothing, void
 */
void lexer_trace(FILE* file_out, const char* data, Token** tokens)
{
	debug_lexer("lexer_trace");

	// Guard against a NULL list. The previous form,
	//   while (tokens != NULL && *tokens != NULL) if (tokens == NULL) return;
	// spun forever on any non-empty list: its body could never be true and
	// never advanced the pointer.
	if (tokens == NULL) return;

	while (*tokens != NULL)
	{
		Token* t = *tokens;

		char* t_name = token_name(t->type);

		// Convert the flat source index into (line, column) for both the
		// start and end positions. NOTE(review): return values ignored here,
		// as in the original; positions stay unmodified on failure — confirm
		// that file_convert_index_to_rc leaves outputs untouched when false.
		file_convert_index_to_rc(data, t->pos.index, &t->pos.line, &t->pos.column);
		file_convert_index_to_rc(data, t->pos_end.index, &t->pos_end.line, &t->pos_end.column);

		fprintf(file_out, "[%zu:%zu] [%zu:%zu - %zu:%zu] %s", t->pos.tokens, t->length, t->pos.line, t->pos.column, t->pos_end.line, t->pos_end.column, t_name);

		// Tokens carrying a literal value (identifiers, numbers, strings)
		// get it appended in quotes.
		if (t->value != NULL)
		{
			fprintf(file_out, ": \"%s\"", t->value);
		}
		fprintf(file_out, "\n");

		if (t->type == TOKEN_EOF) break;
		tokens++;
	}
}
2 changes: 1 addition & 1 deletion src/parser/lexer/main.c
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
#include <stdio.h>
#include <stdlib.h>

#include <builtins/file.h>
#include "../../builtins/file.h"

#include "lexer.h"
extern Token* current;
Expand Down
2 changes: 1 addition & 1 deletion src/parser/lexer/test.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,5 +2,5 @@

clear
bash build.sh
./lexer ../input.one log
./lexer ../../input.one log
cat log
49 changes: 48 additions & 1 deletion src/parser/lexer/token.c
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,8 @@

#include "token.h"
#include "lexer.h"
#include <builtins/error.h>
#include "../../builtins/array.h"
#include "../../builtins/error.h"

#ifdef _ONE_TEST_
#define debug_token(format, args...) \
Expand Down Expand Up @@ -587,3 +588,49 @@ bool token_is_skip(TokenType type)
type == TOKEN_SKIP_COMMENT_SINGLE ||
type == TOKEN_SKIP_COMMENT_MULTI);
}

/*
* @function: tokenizer_string
* @description: Create a array of tokens from a one program source-code char*
* @inputs: char* of a One program source-code
* @return: Array of Token
*/
Token** tokenizer_string(char* data)
{
debug_token("tokenizer_string");
debug_token("tokenizer_string: %s", data);

lexer_init(data);

Array tokens;
array_init(&tokens);

size_t i;
Token* t;
for (;;)
{
t = lexer_scan();
// printf("==>%s\n", token_name(t->type));
array_push(&tokens, t);
// debug_lexer("parser_scan: print_token %s", token_name(t->type));
jbampton marked this conversation as resolved.
Show resolved Hide resolved
if (t->type == TOKEN_ERROR)
{
printf("Error: %s\n", t->value);
break;
}
else if (t->type == TOKEN_EOF)
break;
}

debug_token("tokenizer_string: count of tokens is %d", tokens.count);
debug_token("tokenizer_string: size of tokens is %d", tokens.size);

// while (*data != '\0')
// {
// Token* t = token_make_value(TOKEN_VALUE_IDENTIFIER, (char*){data});
// array_push(&tokens, t);
// data++;
// }

return (Token**)tokens.data;
}
8 changes: 8 additions & 0 deletions src/parser/lexer/token.h
Original file line number Diff line number Diff line change
Expand Up @@ -407,4 +407,12 @@ size_t token_utf8_string_length(char* s);
*/
bool token_is_skip(TokenType type);

/*
* @function: tokenizer_string
* @description: Create a array of tokens from a one program source-code char*
* @inputs: char* of a One program source-code
* @return: Array of Token
*/
Token** tokenizer_string(char* data);

#endif // _ONE_TOKENIZER_TOKEN_H_
2 changes: 1 addition & 1 deletion src/parser/parser.h
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@

#include "parser_token.h"

#include <builtins/array.h>
#include "../builtins/array.h"

typedef struct
{
Expand Down
2 changes: 1 addition & 1 deletion test/build.sh
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/usr/bin/env bash

FLAGS="-o"
FILES="../src/builtins/array.c ../src/builtins/error.c ../src/builtins/file.c ../src/ast/ast.c ../src/parser/parser.c ../src/parser/parser_token.c ../src/lexer/lexer.c ../src/lexer/tokenizer/token.c ../src/lexer/tokenizer/tokenizer.c -lLLVM-12 -D_ONE_TEST_"
FILES="../src/builtins/array.c ../src/builtins/error.c ../src/builtins/file.c ../src/ast/ast.c ../src/parser/parser.c ../src/parser/parser_token.c ../src/parser/lexer/lexer.c ../src/parser/lexer/token.c -lLLVM-12 -D_ONE_TEST_"
CC="clang"

# compile argument
Expand Down
2 changes: 1 addition & 1 deletion test/test.h
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@

#include "../src/builtins/file.h"

#include "../src/lexer/lexer.h"
#include "../src/parser/lexer/lexer.h"
#include "../src/parser/parser.h"

typedef struct
Expand Down