diff --git a/include/utils/Tokenizer.h b/include/utils/Tokenizer.h
index c7db5fb9e..bb25f374c 100644
--- a/include/utils/Tokenizer.h
+++ b/include/utils/Tokenizer.h
@@ -28,7 +28,8 @@ namespace android {
  * A simple tokenizer for loading and parsing ASCII text files line by line.
  */
 class Tokenizer {
-    Tokenizer(const String8& filename, FileMap* fileMap, char* buffer, size_t length);
+    Tokenizer(const String8& filename, FileMap* fileMap, char* buffer,
+            bool ownBuffer, size_t length);
 
 public:
     ~Tokenizer();
@@ -41,6 +42,15 @@ public:
      */
     static status_t open(const String8& filename, Tokenizer** outTokenizer);
 
+    /**
+     * Prepares to tokenize the contents of a string.
+     *
+     * Returns NO_ERROR and a tokenizer for the string, if successful.
+     * Otherwise returns an error and sets outTokenizer to NULL.
+     */
+    static status_t fromContents(const String8& filename,
+            const char* contents, Tokenizer** outTokenizer);
+
     /**
      * Returns true if at the end of the file.
      */
@@ -111,6 +121,7 @@ private:
     String8 mFilename;
     FileMap* mFileMap;
     char* mBuffer;
+    bool mOwnBuffer;
     size_t mLength;
 
     const char* mCurrent;
diff --git a/libs/utils/Tokenizer.cpp b/libs/utils/Tokenizer.cpp
index efda2bfff..7067533b1 100644
--- a/libs/utils/Tokenizer.cpp
+++ b/libs/utils/Tokenizer.cpp
@@ -35,15 +35,18 @@ static inline bool isDelimiter(char ch, const char* delimiters) {
     return strchr(delimiters, ch) != NULL;
 }
 
-Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer, size_t length) :
+Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer,
+        bool ownBuffer, size_t length) :
         mFilename(filename), mFileMap(fileMap),
-        mBuffer(buffer), mLength(length), mCurrent(buffer), mLineNumber(1) {
+        mBuffer(buffer), mOwnBuffer(ownBuffer), mLength(length),
+        mCurrent(buffer), mLineNumber(1) {
 }
 
 Tokenizer::~Tokenizer() {
     if (mFileMap) {
         mFileMap->release();
-    } else {
+    }
+    if (mOwnBuffer) {
         delete[] mBuffer;
     }
 }
@@ -65,6 +68,7 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
             size_t length = size_t(stat.st_size);
 
             FileMap* fileMap = new FileMap();
+            bool ownBuffer = false;
             char* buffer;
             if (fileMap->create(NULL, fd, 0, length, true)) {
                 fileMap->advise(FileMap::SEQUENTIAL);
@@ -77,6 +81,7 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
                 // The length we obtained from stat is wrong too (it will always be 4096)
                 // so we must trust that read will read the entire file.
                 buffer = new char[length];
+                ownBuffer = true;
                 ssize_t nrd = read(fd, buffer, length);
                 if (nrd < 0) {
                     result = -errno;
@@ -89,7 +94,7 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
             }
 
             if (!result) {
-                *outTokenizer = new Tokenizer(filename, fileMap, buffer, length);
+                *outTokenizer = new Tokenizer(filename, fileMap, buffer, ownBuffer, length);
             }
         }
         close(fd);
@@ -97,6 +102,13 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
     return result;
 }
 
+status_t Tokenizer::fromContents(const String8& filename,
+        const char* contents, Tokenizer** outTokenizer) {
+    *outTokenizer = new Tokenizer(filename, NULL,
+            const_cast<char*>(contents), false, strlen(contents));
+    return OK;
+}
+
 String8 Tokenizer::getLocation() const {
     String8 result;
     result.appendFormat("%s:%d", mFilename.string(), mLineNumber);
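
For context, here is a minimal usage sketch of the new `fromContents()` entry point, assuming the Tokenizer's existing line-oriented API (`isEof`, `isEol`, `skipDelimiters`, `nextToken`, `nextLine`). The helper name, delimiter set, and placeholder filename are illustrative only and not part of this change.

```cpp
#include <utils/Errors.h>
#include <utils/String8.h>
#include <utils/Tokenizer.h>

using namespace android;

// Illustrative delimiter set; real callers choose their own.
static const char* WHITESPACE = " \t\r";

// Hypothetical helper showing how a parser might consume an in-memory
// string via Tokenizer::fromContents() instead of Tokenizer::open().
static status_t parseFromString(const char* contents) {
    Tokenizer* tokenizer = NULL;
    // The filename is only used for diagnostics (getLocation()).
    status_t status = Tokenizer::fromContents(String8("<memory>"),
            contents, &tokenizer);
    if (status) {
        return status;
    }

    while (!tokenizer->isEof()) {
        tokenizer->skipDelimiters(WHITESPACE);
        while (!tokenizer->isEol()) {
            String8 token = tokenizer->nextToken(WHITESPACE);
            // ... handle token; tokenizer->getLocation() reports "filename:line".
            tokenizer->skipDelimiters(WHITESPACE);
        }
        tokenizer->nextLine();
    }

    // ownBuffer is false for fromContents(), so deleting the tokenizer does
    // not free 'contents'; the caller's string must outlive the tokenizer.
    delete tokenizer;
    return OK;
}
```

The ownBuffer flag exists so that the destructor frees mBuffer only in the read() fallback path of open(); the mmap path releases the FileMap instead, and fromContents() leaves ownership of the string with the caller.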