aboutsummaryrefslogtreecommitdiffstatshomepage
path: root/tokenize.c
diff options
authorChristopher Li <spase@chrisli.org>2008-12-17 22:01:17 +0300
committerAlexey Zaytsev <alexey.zaytsev@gmail.com>2008-12-18 20:30:22 +0300
commit9a46ba1b5f134fdab62b60374073476cf5ff0599 (patch)
tree935d3c9935449d9f5807d08861ee3ffab291935d /tokenize.c
parented3d4c5cf65ff50220312353b340e5f6609a5944 (diff)
downloadsparse-dev-9a46ba1b5f134fdab62b60374073476cf5ff0599.tar.gz
Remove pre_buffer
This patch removes the pre_buffer completely. Instead, sparse will tokenize the buffer during add_pre_buffer(). Sparse just tracks the beginning and end of pre_buffer. Reviewed-by: Alexey Zaytsev <alexey.zaytsev@gmail.com> Signed-Off-By: Christopher Li <spase@chrisli.org>
Diffstat (limited to 'tokenize.c')
-rw-r--r--tokenize.c21
1 files changed, 11 insertions, 10 deletions
diff --git a/tokenize.c b/tokenize.c
index d154882d..7c41a56d 100644
--- a/tokenize.c
+++ b/tokenize.c
@@ -304,7 +304,7 @@ static inline int nextchar(stream_t *stream)
struct token eof_token_entry;
-static void mark_eof(stream_t *stream, struct token *end_token)
+static struct token *mark_eof(stream_t *stream)
{
struct token *end;
@@ -315,11 +315,10 @@ static void mark_eof(stream_t *stream, struct token *end_token)
eof_token_entry.next = &eof_token_entry;
eof_token_entry.pos.newline = 1;
- if (!end_token)
- end_token = &eof_token_entry;
- end->next = end_token;
+ end->next = &eof_token_entry;
*stream->tokenlist = end;
stream->tokenlist = NULL;
+ return end;
}
static void add_token(stream_t *stream)
@@ -912,7 +911,7 @@ static struct token *setup_stream(stream_t *stream, int idx, int fd,
return begin;
}
-static void tokenize_stream(stream_t *stream, struct token *endtoken)
+static struct token *tokenize_stream(stream_t *stream)
{
int c = nextchar(stream);
while (c != EOF) {
@@ -927,22 +926,22 @@ static void tokenize_stream(stream_t *stream, struct token *endtoken)
stream->whitespace = 1;
c = nextchar(stream);
}
- mark_eof(stream, endtoken);
+ return mark_eof(stream);
}
-struct token * tokenize_buffer(void *buffer, unsigned long size, struct token *endtoken)
+struct token * tokenize_buffer(void *buffer, unsigned long size, struct token **endtoken)
{
stream_t stream;
struct token *begin;
begin = setup_stream(&stream, 0, -1, buffer, size);
- tokenize_stream(&stream, endtoken);
+ *endtoken = tokenize_stream(&stream);
return begin;
}
struct token * tokenize(const char *name, int fd, struct token *endtoken, const char **next_path)
{
- struct token *begin;
+ struct token *begin, *end;
stream_t stream;
unsigned char buffer[BUFSIZE];
int idx;
@@ -954,6 +953,8 @@ struct token * tokenize(const char *name, int fd, struct token *endtoken, const
}
begin = setup_stream(&stream, idx, fd, buffer, 0);
- tokenize_stream(&stream, endtoken);
+ end = tokenize_stream(&stream);
+ if (endtoken)
+ end->next = endtoken;
return begin;
}