Support tokenizing arbitrary content.

Tokenizer can now wrap an in-memory string via the new fromContents()
factory method instead of requiring a file.  The constructor takes an
explicit ownBuffer flag so the destructor only frees buffers that the
Tokenizer allocated itself (the read() fallback path in open()), not
buffers supplied by the caller.

Bug: 6110399
Change-Id: I37be63b68934fd451e6dffbf7d6079553619c0a3
diff --git a/libs/utils/Tokenizer.cpp b/libs/utils/Tokenizer.cpp
index efda2bf..7067533 100644
--- a/libs/utils/Tokenizer.cpp
+++ b/libs/utils/Tokenizer.cpp
@@ -35,15 +35,18 @@
     return strchr(delimiters, ch) != NULL;
 }
 
-Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer, size_t length) :
+Tokenizer::Tokenizer(const String8& filename, FileMap* fileMap, char* buffer,
+        bool ownBuffer, size_t length) :
         mFilename(filename), mFileMap(fileMap),
-        mBuffer(buffer), mLength(length), mCurrent(buffer), mLineNumber(1) {
+        mBuffer(buffer), mOwnBuffer(ownBuffer), mLength(length),
+        mCurrent(buffer), mLineNumber(1) {
 }
 
 Tokenizer::~Tokenizer() {
     if (mFileMap) {
         mFileMap->release();
-    } else {
+    }
+    if (mOwnBuffer) {
         delete[] mBuffer;
     }
 }
@@ -65,6 +68,7 @@
             size_t length = size_t(stat.st_size);
 
             FileMap* fileMap = new FileMap();
+            bool ownBuffer = false;
             char* buffer;
             if (fileMap->create(NULL, fd, 0, length, true)) {
                 fileMap->advise(FileMap::SEQUENTIAL);
@@ -77,6 +81,7 @@
                 // The length we obtained from stat is wrong too (it will always be 4096)
                 // so we must trust that read will read the entire file.
                 buffer = new char[length];
+                ownBuffer = true;
                 ssize_t nrd = read(fd, buffer, length);
                 if (nrd < 0) {
                     result = -errno;
@@ -89,7 +94,7 @@
             }
 
             if (!result) {
-                *outTokenizer = new Tokenizer(filename, fileMap, buffer, length);
+                *outTokenizer = new Tokenizer(filename, fileMap, buffer, ownBuffer, length);
             }
         }
         close(fd);
@@ -97,6 +102,13 @@
     return result;
 }
 
+status_t Tokenizer::fromContents(const String8& filename,
+        const char* contents, Tokenizer** outTokenizer) {
+    *outTokenizer = new Tokenizer(filename, NULL,
+            const_cast<char*>(contents), false, strlen(contents));
+    return OK;
+}
+
 String8 Tokenizer::getLocation() const {
     String8 result;
     result.appendFormat("%s:%d", mFilename.string(), mLineNumber);
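
Usage sketch (not part of the change): a minimal example of driving the
new entry point, assuming the rest of the Tokenizer API declared in
utils/Tokenizer.h (isEof(), isEol(), peekChar(), skipDelimiters(),
nextToken(), nextLine(), getLocation()).  The helper name parseInMemory
and the sample contents are hypothetical.

    #include <utils/String8.h>
    #include <utils/Tokenizer.h>

    using namespace android;

    // Tokenize a string without touching the filesystem.  The "filename"
    // given to fromContents() is only used for diagnostics, e.g. by
    // getLocation().
    static void parseInMemory() {
        // fromContents() does NOT copy the buffer (it constructs the
        // Tokenizer with ownBuffer == false), so the contents must
        // outlive the Tokenizer.
        static const char kContents[] = "key 1 A\nkey 2 B\n";

        Tokenizer* tokenizer;
        status_t status = Tokenizer::fromContents(String8("<inline>"),
                kContents, &tokenizer);
        if (status) return;

        while (!tokenizer->isEof()) {
            tokenizer->skipDelimiters(" \t");
            if (!tokenizer->isEol() && tokenizer->peekChar() != '#') {
                String8 token = tokenizer->nextToken(" \t\r\n");
                // ... consume the token; on error, report the position
                //     via tokenizer->getLocation() ...
            }
            tokenizer->nextLine();
        }
        delete tokenizer;
    }

This is why the constructor grew the ownBuffer flag: open() keeps
ownership of the heap buffer it allocates on the read() fallback path,
while fromContents() leaves ownership of the wrapped string with the
caller.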