about summary refs log tree commit diff stats homepage
diff options
authorLinus Torvalds <torvalds@ppc970.osdl.org>2004-09-01 12:45:47 -0700
committerLinus Torvalds <torvalds@ppc970.osdl.org>2005-04-07 21:03:00 -0700
commit80b880d61d0cbcee8bb6561d14f7cae841cfa2e2 (patch)
tree1ae6bfc6867a4e767d04ed35f62b32ab50806272
parentf2c8ee9082054c19d0635f79db16191e4c932074 (diff)
downloadsparse-dev-80b880d61d0cbcee8bb6561d14f7cae841cfa2e2.tar.gz
Make "next_path" be per-stream for better "include_next".
Maybe this makes us gcc-compatible. Maybe it doesn't.
-rw-r--r--check.c4
-rw-r--r--compile.c2
-rw-r--r--obfuscate.c2
-rw-r--r--pre-process.c27
-rw-r--r--symbol.c2
-rw-r--r--test-lexing.c2
-rw-r--r--test-linearize.c4
-rw-r--r--test-parsing.c2
-rw-r--r--token.h7
-rw-r--r--tokenize.c7
10 files changed, 29 insertions(+), 30 deletions(-)
diff --git a/check.c b/check.c
index e9d30498..53db70b6 100644
--- a/check.c
+++ b/check.c
@@ -81,12 +81,12 @@ int main(int argc, char **argv)
die("No such file: %s", filename);
// Tokenize the input stream
- token = tokenize(filename, fd, NULL);
+ token = tokenize(filename, fd, NULL, includepath);
close(fd);
// Prepend any "include" file to the stream.
if (include_fd >= 0)
- token = tokenize(include, include_fd, token);
+ token = tokenize(include, include_fd, token, includepath);
// Prepend the initial built-in stream
token = tokenize_buffer(pre_buffer, pre_buffer_size, token);
diff --git a/compile.c b/compile.c
index 3fe191e1..3f3370f9 100644
--- a/compile.c
+++ b/compile.c
@@ -79,7 +79,7 @@ int main(int argc, char **argv)
die("No such file: %s", argv[1]);
// Tokenize the input stream
- token = tokenize(filename, fd, NULL);
+ token = tokenize(filename, fd, NULL, includepath);
close(fd);
// Prepend the initial built-in stream
diff --git a/obfuscate.c b/obfuscate.c
index ff283f57..faa42a47 100644
--- a/obfuscate.c
+++ b/obfuscate.c
@@ -110,7 +110,7 @@ int main(int argc, char **argv)
init_ctype();
// Tokenize the input stream
- token = tokenize(filename, fd, NULL);
+ token = tokenize(filename, fd, NULL, includepath);
close(fd);
// Pre-process the stream
diff --git a/pre-process.c b/pre-process.c
index 5464c829..8c013161 100644
--- a/pre-process.c
+++ b/pre-process.c
@@ -44,9 +44,8 @@ const char *includepath[INCLUDEPATHS+1] = {
NULL
};
-const char **sys_includepath = includepath + 0;
-const char **gcc_includepath = includepath + 2;
-const char **next_includepath = includepath;
+static const char **sys_includepath = includepath + 0;
+static const char **gcc_includepath = includepath + 2;
#define MARK_STREAM_NONCONST(pos) do { \
if (stream->constant != CONSTANT_FILE_NOPE) { \
@@ -594,7 +593,7 @@ static const char *token_name_sequence(struct token *token, int endop, struct to
return buffer;
}
-static int try_include(const char *path, int plen, const char *filename, int flen, struct token **where)
+static int try_include(const char *path, int plen, const char *filename, int flen, struct token **where, const char **next_path)
{
int fd;
static char fullname[PATH_MAX];
@@ -609,7 +608,7 @@ static int try_include(const char *path, int plen, const char *filename, int fle
if (fd >= 0) {
char * streamname = __alloc_bytes(plen + flen);
memcpy(streamname, fullname, plen + flen);
- *where = tokenize(streamname, fd, *where);
+ *where = tokenize(streamname, fd, *where, next_path);
close(fd);
return 1;
}
@@ -621,9 +620,8 @@ static int do_include_path(const char **pptr, struct token **list, struct token
const char *path;
while ((path = *pptr++) != NULL) {
- if (!try_include(path, strlen(path), filename, flen, list))
+ if (!try_include(path, strlen(path), filename, flen, list, pptr))
continue;
- next_includepath = pptr;
return 1;
}
return 0;
@@ -636,7 +634,7 @@ static void do_include(int local, struct stream *stream, struct token **list, st
/* Absolute path? */
if (filename[0] == '/') {
- if (try_include("", 0, filename, flen, list))
+ if (try_include("", 0, filename, flen, list, includepath))
return;
goto out;
}
@@ -651,7 +649,7 @@ static void do_include(int local, struct stream *stream, struct token **list, st
slash = strrchr(path, '/');
plen = slash ? slash - path : 0;
- if (try_include(path, plen, filename, flen, list))
+ if (try_include(path, plen, filename, flen, list, includepath))
return;
}
@@ -698,7 +696,7 @@ static int handle_include(struct stream *stream, struct token **list, struct tok
static int handle_include_next(struct stream *stream, struct token **list, struct token *token)
{
- return handle_include_path(stream, list, token, next_includepath);
+ return handle_include_path(stream, list, token, stream->next_path);
}
static int token_different(struct token *t1, struct token *t2)
@@ -1285,19 +1283,17 @@ static int handle_nostdinc(struct stream *stream, struct token **line, struct to
* Do we have any non-system includes?
* Clear them out if so..
*/
- stdinc = sys_includepath - includepath;
+ stdinc = gcc_includepath - sys_includepath;
if (stdinc) {
- const char **src = sys_includepath;
- const char **dst = includepath;
+ const char **src = gcc_includepath;
+ const char **dst = sys_includepath;
for (;;) {
if (!(*dst = *src))
break;
dst++;
src++;
}
- sys_includepath -= stdinc;
gcc_includepath -= stdinc;
- next_includepath = includepath;
}
return 1;
}
@@ -1315,7 +1311,6 @@ static void add_path_entry(struct token *token, const char *path)
dst = sys_includepath;
sys_includepath++;
gcc_includepath++;
- next_includepath = includepath;
/*
* Move them all up starting at "sys_includepath",
diff --git a/symbol.c b/symbol.c
index e2991998..2aea21a2 100644
--- a/symbol.c
+++ b/symbol.c
@@ -550,7 +550,7 @@ struct ident __func___ident = __INIT_IDENT("__func__");
void init_symbols(void)
{
- int stream = init_stream("builtin", -1);
+ int stream = init_stream("builtin", -1, includepath);
struct sym_init *ptr;
hash_ident(&sizeof_ident);
diff --git a/test-lexing.c b/test-lexing.c
index 0e971e67..ce249700 100644
--- a/test-lexing.c
+++ b/test-lexing.c
@@ -31,7 +31,7 @@ int main(int argc, char **argv)
// Initialize type system
init_ctype();
- token = tokenize(argv[1], fd, NULL);
+ token = tokenize(argv[1], fd, NULL, includepath);
close(fd);
token = preprocess(token);
diff --git a/test-linearize.c b/test-linearize.c
index 549d0143..300a8b75 100644
--- a/test-linearize.c
+++ b/test-linearize.c
@@ -67,12 +67,12 @@ int main(int argc, char **argv)
die("No such file: %s", filename);
// Tokenize the input stream
- token = tokenize(filename, fd, NULL);
+ token = tokenize(filename, fd, NULL, includepath);
close(fd);
// Prepend any "include" file to the stream.
if (include_fd >= 0)
- token = tokenize(include, include_fd, token);
+ token = tokenize(include, include_fd, token, includepath);
// Prepend the initial built-in stream
token = tokenize_buffer(pre_buffer, pre_buffer_size, token);
diff --git a/test-parsing.c b/test-parsing.c
index c01e21bd..307997cf 100644
--- a/test-parsing.c
+++ b/test-parsing.c
@@ -59,7 +59,7 @@ int main(int argc, char **argv)
die("No such file: %s", argv[1]);
// Tokenize the input stream
- token = tokenize(filename, fd, NULL);
+ token = tokenize(filename, fd, NULL, includepath);
close(fd);
// Prepend the initial built-in stream
diff --git a/token.h b/token.h
index 5fe81b29..ecb8ea01 100644
--- a/token.h
+++ b/token.h
@@ -30,9 +30,12 @@ enum constantfile {
CONSTANT_FILE_YES // Yes
};
+extern const char *includepath[];
+
struct stream {
int fd;
const char *name;
+ const char **next_path;
/* Use these to check for "already parsed" */
enum constantfile constant;
@@ -176,7 +179,7 @@ extern int preprocessing, verbose;
extern struct token eof_token_entry;
#define eof_token(x) ((x) == &eof_token_entry)
-extern int init_stream(const char *, int fd);
+extern int init_stream(const char *, int fd, const char **next_path);
extern struct ident *hash_ident(struct ident *);
extern struct ident *built_in_ident(const char *);
extern struct token *built_in_token(int, const char *);
@@ -184,7 +187,7 @@ extern const char *show_special(int);
extern const char *show_ident(const struct ident *);
extern const char *show_string(const struct string *string);
extern const char *show_token(const struct token *);
-extern struct token * tokenize(const char *, int, struct token *);
+extern struct token * tokenize(const char *, int, struct token *, const char **next_path);
extern struct token * tokenize_buffer(unsigned char *, unsigned long, struct token *);
extern void die(const char *, ...);
diff --git a/tokenize.c b/tokenize.c
index 17e8ede9..e418a026 100644
--- a/tokenize.c
+++ b/tokenize.c
@@ -146,7 +146,7 @@ const char *show_token(const struct token *token)
}
}
-int init_stream(const char *name, int fd)
+int init_stream(const char *name, int fd, const char **next_path)
{
int stream = input_stream_nr;
struct stream *current;
@@ -163,6 +163,7 @@ int init_stream(const char *name, int fd)
memset(current, 0, sizeof(*current));
current->name = name;
current->fd = fd;
+ current->next_path = next_path;
current->constant = CONSTANT_FILE_MAYBE;
if (fd >= 0 && fstat(fd, &st) == 0 && S_ISREG(st.st_mode)) {
int i;
@@ -874,14 +875,14 @@ struct token * tokenize_buffer(unsigned char *buffer, unsigned long size, struct
return begin;
}
-struct token * tokenize(const char *name, int fd, struct token *endtoken)
+struct token * tokenize(const char *name, int fd, struct token *endtoken, const char **next_path)
{
struct token *begin;
stream_t stream;
unsigned char buffer[BUFSIZE];
int idx;
- idx = init_stream(name, fd);
+ idx = init_stream(name, fd, next_path);
if (idx < 0) {
// info(endtoken->pos, "File %s is const", name);
return endtoken;