Support tokenizing arbitrary content.

Bug: 6110399
Change-Id: I37be63b68934fd451e6dffbf7d6079553619c0a3
author Jeff Brown 2012-04-17 18:19:50 -07:00
parent 172a62a224
commit a8be8fa096
2 changed files with 28 additions and 5 deletions
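
The new entry point lets callers tokenize an in-memory string instead of a file. As a rough usage sketch (the include paths, the label string, and the sample contents are illustrative assumptions; only fromContents() itself and its ownership behavior come from the diff below):

#include <utils/String8.h>
#include <utils/Tokenizer.h>   // assumed include path for this class

using namespace android;

status_t tokenizeInline() {
    // The string is not copied by the tokenizer, so it must stay valid
    // for as long as the tokenizer is in use.
    static const char kContents[] = "key 1 A\nkey 2 B\n";

    Tokenizer* tokenizer;
    status_t status = Tokenizer::fromContents(String8("<inline>"), kContents, &tokenizer);
    if (status) {
        return status;  // NO_ERROR (0) on success, per the header comment
    }
    // ... walk kContents with the tokenizer's parsing methods ...
    delete tokenizer;
    return OK;
}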

Tokenizer.h

@@ -28,7 +28,8 @@ namespace android {
  * A simple tokenizer for loading and parsing ASCII text files line by line.
  */
 class Tokenizer {
-    Tokenizer(const String8& filename, FileMap* fileMap, char* buffer, size_t length);
+    Tokenizer(const String8& filename, FileMap* fileMap, char* buffer,
+            bool ownBuffer, size_t length);
 
 public:
     ~Tokenizer();
@@ -41,6 +42,15 @@ public:
      */
     static status_t open(const String8& filename, Tokenizer** outTokenizer);
 
+    /**
+     * Prepares to tokenize the contents of a string.
+     *
+     * Returns NO_ERROR and a tokenizer for the string, if successful.
+     * Otherwise returns an error and sets outTokenizer to NULL.
+     */
+    static status_t fromContents(const String8& filename,
+            const char* contents, Tokenizer** outTokenizer);
+
     /**
      * Returns true if at the end of the file.
      */
@@ -111,6 +121,7 @@ private:
     String8 mFilename;
     FileMap* mFileMap;
     char* mBuffer;
+    bool mOwnBuffer;
     size_t mLength;
 
     const char* mCurrent;
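
The mOwnBuffer flag added above records whether the Tokenizer allocated mBuffer itself and must therefore delete[] it, as opposed to memory that belongs to a FileMap or to the caller of fromContents(). In isolation the pattern looks like this standalone sketch (illustrative class and names, not the real one):

class OwnedOrBorrowedBuffer {
public:
    // ownBuffer == true: this object allocated "data" with new[] and must free it.
    // ownBuffer == false: the memory belongs to someone else (a FileMap or the
    // caller), so the destructor must leave it alone.
    OwnedOrBorrowedBuffer(char* data, bool ownBuffer)
            : mData(data), mOwnBuffer(ownBuffer) { }

    ~OwnedOrBorrowedBuffer() {
        if (mOwnBuffer) {
            delete[] mData;
        }
    }

private:
    char* mData;
    bool mOwnBuffer;
};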

Tokenizer.cpp

@@ -35,15 +35,18 @@ static inline bool isDelimiter(char ch, const char* delimiters) {
     return strchr(delimiters, ch) != NULL;
 }
 
-Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer, size_t length) :
+Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer,
+        bool ownBuffer, size_t length) :
         mFilename(filename), mFileMap(fileMap),
-        mBuffer(buffer), mLength(length), mCurrent(buffer), mLineNumber(1) {
+        mBuffer(buffer), mOwnBuffer(ownBuffer), mLength(length),
+        mCurrent(buffer), mLineNumber(1) {
 }
 
 Tokenizer::~Tokenizer() {
     if (mFileMap) {
         mFileMap->release();
-    } else {
+    }
+    if (mOwnBuffer) {
         delete[] mBuffer;
     }
 }
@@ -65,6 +68,7 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
             size_t length = size_t(stat.st_size);
 
             FileMap* fileMap = new FileMap();
+            bool ownBuffer = false;
             char* buffer;
             if (fileMap->create(NULL, fd, 0, length, true)) {
                 fileMap->advise(FileMap::SEQUENTIAL);
@@ -77,6 +81,7 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
                 // The length we obtained from stat is wrong too (it will always be 4096)
                 // so we must trust that read will read the entire file.
                 buffer = new char[length];
+                ownBuffer = true;
                 ssize_t nrd = read(fd, buffer, length);
                 if (nrd < 0) {
                     result = -errno;
@@ -89,7 +94,7 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
             }
 
             if (!result) {
-                *outTokenizer = new Tokenizer(filename, fileMap, buffer, length);
+                *outTokenizer = new Tokenizer(filename, fileMap, buffer, ownBuffer, length);
             }
         }
         close(fd);
@@ -97,6 +102,13 @@ status_t Tokenizer::open(const String8& filename, Tokenizer** outTokenizer) {
     return result;
 }
 
+status_t Tokenizer::fromContents(const String8& filename,
+        const char* contents, Tokenizer** outTokenizer) {
+    *outTokenizer = new Tokenizer(filename, NULL,
+            const_cast<char*>(contents), false, strlen(contents));
+    return OK;
+}
+
 String8 Tokenizer::getLocation() const {
     String8 result;
     result.appendFormat("%s:%d", mFilename.string(), mLineNumber);
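
Worth noting from the implementation: fromContents() stores the caller's pointer directly (via const_cast, without copying) and constructs the tokenizer with no FileMap and ownBuffer = false, so neither branch of the destructor releases it. A hedged sketch of the resulting contract, with illustrative names:

#include <utils/String8.h>
#include <utils/Tokenizer.h>   // assumed include path

using namespace android;

// Illustrative only: the lifetime contract implied by fromContents().
void parseInlineConfig() {
    String8 contents("option value\n");

    Tokenizer* tokenizer;
    if (Tokenizer::fromContents(String8("<config>"), contents.string(), &tokenizer) == OK) {
        // "contents" must outlive the tokenizer: the Tokenizer neither
        // copies nor frees the string it was handed.
        // ... parse ...
        delete tokenizer;
    }
    // Safe here: "contents" is still in scope when the tokenizer is deleted.
}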