Posted to commits@lucy.apache.org by nw...@apache.org on 2013/09/01 22:16:58 UTC

[lucy-commits] [10/24] Rename CharBuf to String (cnick Str)
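
[Editor's note] The hunks below apply one mechanical pattern: the Clownfish CharBuf type (cnick CB) becomes String (cnick Str), and every CB_* call site is renamed to its Str_* counterpart. A minimal before/after sketch of that pattern, drawn from the constructors and accessors that appear in the hunks themselves (Str_newf, Str_Get_Size, Str_Get_Ptr8, Str_Clone); variable names are illustrative only:

    /* Before (CharBuf API): */
    CharBuf *file = CB_newf("%o/lexicon-%i32.dat", seg_name, field_num);
    size_t   size = CB_Get_Size(file);
    DECREF(file);

    /* After (String API, same format specifiers and refcounting): */
    String  *file = Str_newf("%o/lexicon-%i32.dat", seg_name, field_num);
    size_t   size = Str_Get_Size(file);
    DECREF(file);

Declarations, casts, and vtable constants change the same way: CharBuf* becomes String*, (CharBuf*) casts become (String*), and CHARBUF becomes STRING.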

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Lexicon.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Lexicon.cfh b/core/Lucy/Index/Lexicon.cfh
index faf83c5..68335d4 100644
--- a/core/Lucy/Index/Lexicon.cfh
+++ b/core/Lucy/Index/Lexicon.cfh
@@ -33,10 +33,10 @@ parcel Lucy;
 
 public class Lucy::Index::Lexicon cnick Lex inherits Clownfish::Obj {
 
-    CharBuf *field;
+    String *field;
 
     public inert Lexicon*
-    init(Lexicon *self, const CharBuf *field);
+    init(Lexicon *self, const String *field);
 
     public void
     Destroy(Lexicon *self);
@@ -73,7 +73,7 @@ public class Lucy::Index::Lexicon cnick Lex inherits Clownfish::Obj {
     public abstract nullable Obj*
     Get_Term(Lexicon *self);
 
-    public CharBuf*
+    public String*
     Get_Field(Lexicon *self);
 }
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/LexiconReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/LexiconReader.c b/core/Lucy/Index/LexiconReader.c
index 0e2a093..0a8b434 100644
--- a/core/Lucy/Index/LexiconReader.c
+++ b/core/Lucy/Index/LexiconReader.c
@@ -90,7 +90,7 @@ PolyLexReader_Destroy_IMP(PolyLexiconReader *self) {
 }
 
 Lexicon*
-PolyLexReader_Lexicon_IMP(PolyLexiconReader *self, const CharBuf *field,
+PolyLexReader_Lexicon_IMP(PolyLexiconReader *self, const String *field,
                           Obj *term) {
     PolyLexicon *lexicon = NULL;
 
@@ -112,7 +112,7 @@ PolyLexReader_Lexicon_IMP(PolyLexiconReader *self, const CharBuf *field,
 }
 
 uint32_t
-PolyLexReader_Doc_Freq_IMP(PolyLexiconReader *self, const CharBuf *field,
+PolyLexReader_Doc_Freq_IMP(PolyLexiconReader *self, const String *field,
                            Obj *term) {
     PolyLexiconReaderIVARS *const ivars = PolyLexReader_IVARS(self);
     uint32_t doc_freq = 0;
@@ -138,7 +138,7 @@ DefLexReader_new(Schema *schema, Folder *folder, Snapshot *snapshot,
 // parameters. Will return false if the field is not indexed or if no terms
 // are present for this field in this segment.
 static bool
-S_has_data(Schema *schema, Folder *folder, Segment *segment, CharBuf *field) {
+S_has_data(Schema *schema, Folder *folder, Segment *segment, String *field) {
     FieldType *type = Schema_Fetch_Type(schema, field);
 
     if (!type || !FType_Indexed(type)) {
@@ -148,8 +148,8 @@ S_has_data(Schema *schema, Folder *folder, Segment *segment, CharBuf *field) {
     else {
         // Bail out if there are no terms for this field in this segment.
         int32_t  field_num = Seg_Field_Num(segment, field);
-        CharBuf *seg_name  = Seg_Get_Name(segment);
-        CharBuf *file = CB_newf("%o/lexicon-%i32.dat", seg_name, field_num);
+        String *seg_name  = Seg_Get_Name(segment);
+        String *file = Str_newf("%o/lexicon-%i32.dat", seg_name, field_num);
         bool retval = Folder_Exists(folder, file);
         DECREF(file);
         return retval;
@@ -169,7 +169,7 @@ DefLexReader_init(DefaultLexiconReader *self, Schema *schema, Folder *folder,
     // Build an array of SegLexicon objects.
     ivars->lexicons = VA_new(Schema_Num_Fields(schema));
     for (uint32_t i = 1, max = Schema_Num_Fields(schema) + 1; i < max; i++) {
-        CharBuf *field = Seg_Field_Name(segment, i);
+        String *field = Seg_Field_Name(segment, i);
         if (field && S_has_data(schema, folder, segment, field)) {
             SegLexicon *lexicon = SegLex_new(schema, folder, segment, field);
             VA_Store(ivars->lexicons, i, (Obj*)lexicon);
@@ -194,7 +194,7 @@ DefLexReader_Destroy_IMP(DefaultLexiconReader *self) {
 }
 
 Lexicon*
-DefLexReader_Lexicon_IMP(DefaultLexiconReader *self, const CharBuf *field,
+DefLexReader_Lexicon_IMP(DefaultLexiconReader *self, const String *field,
                          Obj *term) {
     DefaultLexiconReaderIVARS *const ivars = DefLexReader_IVARS(self);
     int32_t     field_num = Seg_Field_Num(ivars->segment, field);
@@ -211,7 +211,7 @@ DefLexReader_Lexicon_IMP(DefaultLexiconReader *self, const CharBuf *field,
 }
 
 static TermInfo*
-S_find_tinfo(DefaultLexiconReader *self, const CharBuf *field, Obj *target) {
+S_find_tinfo(DefaultLexiconReader *self, const String *field, Obj *target) {
     DefaultLexiconReaderIVARS *const ivars = DefLexReader_IVARS(self);
     if (field != NULL && target != NULL) {
         int32_t field_num = Seg_Field_Num(ivars->segment, field);
@@ -234,13 +234,13 @@ S_find_tinfo(DefaultLexiconReader *self, const CharBuf *field, Obj *target) {
 
 TermInfo*
 DefLexReader_Fetch_Term_Info_IMP(DefaultLexiconReader *self,
-                                 const CharBuf *field, Obj *target) {
+                                 const String *field, Obj *target) {
     TermInfo *tinfo = S_find_tinfo(self, field, target);
     return tinfo ? TInfo_Clone(tinfo) : NULL;
 }
 
 uint32_t
-DefLexReader_Doc_Freq_IMP(DefaultLexiconReader *self, const CharBuf *field,
+DefLexReader_Doc_Freq_IMP(DefaultLexiconReader *self, const String *field,
                           Obj *term) {
     TermInfo *tinfo = S_find_tinfo(self, field, term);
     return tinfo ? TInfo_Get_Doc_Freq(tinfo) : 0;

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/LexiconReader.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/LexiconReader.cfh b/core/Lucy/Index/LexiconReader.cfh
index adddad0..471d62a 100644
--- a/core/Lucy/Index/LexiconReader.cfh
+++ b/core/Lucy/Index/LexiconReader.cfh
@@ -36,17 +36,17 @@ public abstract class Lucy::Index::LexiconReader cnick LexReader
      * @param term Pre-locate the Lexicon to this term.
      */
     public abstract incremented nullable Lexicon*
-    Lexicon(LexiconReader *self, const CharBuf *field, Obj *term = NULL);
+    Lexicon(LexiconReader *self, const String *field, Obj *term = NULL);
 
     /** Return the number of documents where the specified term is present.
      */
     public abstract uint32_t
-    Doc_Freq(LexiconReader *self, const CharBuf *field, Obj *term);
+    Doc_Freq(LexiconReader *self, const String *field, Obj *term);
 
     /** If the term can be found, return a term info, otherwise return NULL.
      */
     abstract incremented nullable TermInfo*
-    Fetch_Term_Info(LexiconReader *self, const CharBuf *field, Obj *term);
+    Fetch_Term_Info(LexiconReader *self, const String *field, Obj *term);
 
     /** Return a LexiconReader which merges the output of other
      * LexiconReaders.
@@ -71,10 +71,10 @@ class Lucy::Index::PolyLexiconReader cnick PolyLexReader
     init(PolyLexiconReader *self, VArray *readers, I32Array *offsets);
 
     public incremented nullable Lexicon*
-    Lexicon(PolyLexiconReader *self, const CharBuf *field, Obj *term = NULL);
+    Lexicon(PolyLexiconReader *self, const String *field, Obj *term = NULL);
 
     public uint32_t
-    Doc_Freq(PolyLexiconReader *self, const CharBuf *field, Obj *term);
+    Doc_Freq(PolyLexiconReader *self, const String *field, Obj *term);
 
     public void
     Close(PolyLexiconReader *self);
@@ -97,16 +97,16 @@ class Lucy::Index::DefaultLexiconReader cnick DefLexReader
          Snapshot *snapshot, VArray *segments, int32_t seg_tick);
 
     public incremented nullable Lexicon*
-    Lexicon(DefaultLexiconReader *self, const CharBuf *field,
+    Lexicon(DefaultLexiconReader *self, const String *field,
             Obj *term = NULL);
 
     /** Return the number of documents in which the term appears.
      */
     public uint32_t
-    Doc_Freq(DefaultLexiconReader *self, const CharBuf *field, Obj *term);
+    Doc_Freq(DefaultLexiconReader *self, const String *field, Obj *term);
 
     incremented nullable TermInfo*
-    Fetch_Term_Info(DefaultLexiconReader *self, const CharBuf *field,
+    Fetch_Term_Info(DefaultLexiconReader *self, const String *field,
                     Obj *term);
 
     public void

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/LexiconWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/LexiconWriter.c b/core/Lucy/Index/LexiconWriter.c
index e04d11e..9ffd853 100644
--- a/core/Lucy/Index/LexiconWriter.c
+++ b/core/Lucy/Index/LexiconWriter.c
@@ -104,7 +104,7 @@ S_add_last_term_to_ix(LexiconWriter *self) {
 }
 
 void
-LexWriter_Add_Term_IMP(LexiconWriter* self, CharBuf* term_text, TermInfo* tinfo) {
+LexWriter_Add_Term_IMP(LexiconWriter* self, String* term_text, TermInfo* tinfo) {
     LexiconWriterIVARS *const ivars = LexWriter_IVARS(self);
     OutStream *dat_out = ivars->dat_out;
 
@@ -128,17 +128,17 @@ LexWriter_Start_Field_IMP(LexiconWriter *self, int32_t field_num) {
     Segment   *const segment  = LexWriter_Get_Segment(self);
     Folder    *const folder   = LexWriter_Get_Folder(self);
     Schema    *const schema   = LexWriter_Get_Schema(self);
-    CharBuf   *const seg_name = Seg_Get_Name(segment);
-    CharBuf   *const field    = Seg_Field_Name(segment, field_num);
+    String    *const seg_name = Seg_Get_Name(segment);
+    String    *const field    = Seg_Field_Name(segment, field_num);
     FieldType *const type     = Schema_Fetch_Type(schema, field);
 
     // Open outstreams.
     DECREF(ivars->dat_file);
     DECREF(ivars->ix_file);
     DECREF(ivars->ixix_file);
-    ivars->dat_file  = CB_newf("%o/lexicon-%i32.dat",  seg_name, field_num);
-    ivars->ix_file   = CB_newf("%o/lexicon-%i32.ix",   seg_name, field_num);
-    ivars->ixix_file = CB_newf("%o/lexicon-%i32.ixix", seg_name, field_num);
+    ivars->dat_file  = Str_newf("%o/lexicon-%i32.dat",  seg_name, field_num);
+    ivars->ix_file   = Str_newf("%o/lexicon-%i32.ix",   seg_name, field_num);
+    ivars->ixix_file = Str_newf("%o/lexicon-%i32.ixix", seg_name, field_num);
     ivars->dat_out = Folder_Open_Out(folder, ivars->dat_file);
     if (!ivars->dat_out) { RETHROW(INCREF(Err_get_error())); }
     ivars->ix_out = Folder_Open_Out(folder, ivars->ix_file);
@@ -156,13 +156,13 @@ LexWriter_Start_Field_IMP(LexiconWriter *self, int32_t field_num) {
 void
 LexWriter_Finish_Field_IMP(LexiconWriter *self, int32_t field_num) {
     LexiconWriterIVARS *const ivars = LexWriter_IVARS(self);
-    CharBuf *field = Seg_Field_Name(ivars->segment, field_num);
+    String *field = Seg_Field_Name(ivars->segment, field_num);
 
     // Store count of terms for this field as metadata.
     Hash_Store(ivars->counts, (Obj*)field,
-               (Obj*)CB_newf("%i32", ivars->count));
+               (Obj*)Str_newf("%i32", ivars->count));
     Hash_Store(ivars->ix_counts, (Obj*)field,
-               (Obj*)CB_newf("%i32", ivars->ix_count));
+               (Obj*)Str_newf("%i32", ivars->ix_count));
 
     // Close streams.
     OutStream_Close(ivars->dat_out);
@@ -181,7 +181,7 @@ LexWriter_Finish_Field_IMP(LexiconWriter *self, int32_t field_num) {
 }
 
 void
-LexWriter_Enter_Temp_Mode_IMP(LexiconWriter *self, const CharBuf *field,
+LexWriter_Enter_Temp_Mode_IMP(LexiconWriter *self, const String *field,
                               OutStream *temp_outstream) {
     LexiconWriterIVARS *const ivars = LexWriter_IVARS(self);
     Schema    *schema = LexWriter_Get_Schema(self);
@@ -246,9 +246,9 @@ LexWriter_Metadata_IMP(LexiconWriter *self) {
 
     // Placeholders.
     if (Hash_Get_Size(counts) == 0) {
-        Hash_Store_Str(counts, "none", 4, (Obj*)CB_newf("%i32", (int32_t)0));
+        Hash_Store_Str(counts, "none", 4, (Obj*)Str_newf("%i32", (int32_t)0));
         Hash_Store_Str(ix_counts, "none", 4,
-                       (Obj*)CB_newf("%i32", (int32_t)0));
+                       (Obj*)Str_newf("%i32", (int32_t)0));
     }
 
     Hash_Store_Str(metadata, "counts", 6, (Obj*)counts);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/LexiconWriter.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/LexiconWriter.cfh b/core/Lucy/Index/LexiconWriter.cfh
index a3c85a9..340e271 100644
--- a/core/Lucy/Index/LexiconWriter.cfh
+++ b/core/Lucy/Index/LexiconWriter.cfh
@@ -23,9 +23,9 @@ class Lucy::Index::LexiconWriter cnick LexWriter
 
     TermStepper      *term_stepper;
     TermStepper      *tinfo_stepper;
-    CharBuf          *dat_file;
-    CharBuf          *ix_file;
-    CharBuf          *ixix_file;
+    String           *dat_file;
+    String           *ix_file;
+    String           *ixix_file;
     OutStream        *dat_out;
     OutStream        *ix_out;
     OutStream        *ixix_out;
@@ -60,7 +60,7 @@ class Lucy::Index::LexiconWriter cnick LexWriter
     /** Prepare to write terms to a temporary file.
      */
     void
-    Enter_Temp_Mode(LexiconWriter *self, const CharBuf *field,
+    Enter_Temp_Mode(LexiconWriter *self, const String *field,
                     OutStream *temp_outstream);
 
     /** Stop writing terms to temp file.  Abandon (but don't close) the file.
@@ -72,7 +72,7 @@ class Lucy::Index::LexiconWriter cnick LexWriter
      * field number).
      */
     void
-    Add_Term(LexiconWriter* self, CharBuf* term_text, TermInfo* tinfo);
+    Add_Term(LexiconWriter* self, String* term_text, TermInfo* tinfo);
 
     public void
     Add_Segment(LexiconWriter *self, SegReader *reader,

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/PolyLexicon.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PolyLexicon.c b/core/Lucy/Index/PolyLexicon.c
index e2459e0..2315839 100644
--- a/core/Lucy/Index/PolyLexicon.c
+++ b/core/Lucy/Index/PolyLexicon.c
@@ -29,13 +29,13 @@ static void
 S_refresh_lex_q(SegLexQueue *lex_q, VArray *seg_lexicons, Obj *target);
 
 PolyLexicon*
-PolyLex_new(const CharBuf *field, VArray *sub_readers) {
+PolyLex_new(const String *field, VArray *sub_readers) {
     PolyLexicon *self = (PolyLexicon*)VTable_Make_Obj(POLYLEXICON);
     return PolyLex_init(self, field, sub_readers);
 }
 
 PolyLexicon*
-PolyLex_init(PolyLexicon *self, const CharBuf *field, VArray *sub_readers) {
+PolyLex_init(PolyLexicon *self, const String *field, VArray *sub_readers) {
     uint32_t  num_sub_readers = VA_Get_Size(sub_readers);
     VArray   *seg_lexicons    = VA_new(num_sub_readers);
 
@@ -208,7 +208,7 @@ SegLexQ_Less_Than_IMP(SegLexQueue *self, Obj *a, Obj *b) {
     Obj *const term_a = SegLex_Get_Term(lex_a);
     Obj *const term_b = SegLex_Get_Term(lex_b);
     UNUSED_VAR(self);
-    return CB_less_than(&term_a, &term_b);
+    return Str_less_than(&term_a, &term_b);
 }
 
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/PolyLexicon.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PolyLexicon.cfh b/core/Lucy/Index/PolyLexicon.cfh
index 26f6961..87eca4e 100644
--- a/core/Lucy/Index/PolyLexicon.cfh
+++ b/core/Lucy/Index/PolyLexicon.cfh
@@ -30,10 +30,10 @@ class Lucy::Index::PolyLexicon cnick PolyLex
     int32_t         size;
 
     inert incremented PolyLexicon*
-    new(const CharBuf *field, VArray *sub_readers);
+    new(const String *field, VArray *sub_readers);
 
     inert PolyLexicon*
-    init(PolyLexicon *self, const CharBuf *field, VArray *sub_readers);
+    init(PolyLexicon *self, const String *field, VArray *sub_readers);
 
     public void
     Seek(PolyLexicon *self, Obj *target = NULL);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/PolyReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PolyReader.c b/core/Lucy/Index/PolyReader.c
index 74b10f2..47c441e 100644
--- a/core/Lucy/Index/PolyReader.c
+++ b/core/Lucy/Index/PolyReader.c
@@ -36,7 +36,7 @@
 
 // Obtain/release read locks and commit locks.
 static bool
-S_obtain_read_lock(PolyReader *self, const CharBuf *snapshot_filename);
+S_obtain_read_lock(PolyReader *self, const String *snapshot_filename);
 static bool 
 S_obtain_deletion_lock(PolyReader *self);
 static void
@@ -56,7 +56,7 @@ S_try_open_elements(void *context);
 struct try_read_snapshot_context {
     Snapshot *snapshot;
     Folder   *folder;
-    CharBuf  *path;
+    String   *path;
 };
 static void
 S_try_read_snapshot(void *context);
@@ -115,7 +115,7 @@ S_init_sub_readers(PolyReader *self, VArray *sub_readers) {
     for (uint32_t i = 0; i < num_sub_readers; i++) {
         SegReader *seg_reader = (SegReader*)VA_Fetch(sub_readers, i);
         Hash *components = SegReader_Get_Components(seg_reader);
-        CharBuf *api;
+        String *api;
         DataReader *component;
         starts[i] = ivars->doc_max;
         ivars->doc_max += SegReader_Doc_Max(seg_reader);
@@ -131,7 +131,7 @@ S_init_sub_readers(PolyReader *self, VArray *sub_readers) {
     }
     ivars->offsets = I32Arr_new_steal(starts, num_sub_readers);
 
-    CharBuf *api;
+    String *api;
     VArray  *readers;
     Hash_Iterate(data_readers);
     while (Hash_Next(data_readers, (Obj**)&api, (Obj**)&readers)) {
@@ -228,17 +228,17 @@ S_try_open_elements(void *context) {
     Folder     *folder            = PolyReader_Get_Folder(self);
     uint32_t    num_segs          = 0;
     uint64_t    latest_schema_gen = 0;
-    CharBuf    *schema_file       = NULL;
+    String     *schema_file       = NULL;
 
     // Find schema file, count segments.
     for (uint32_t i = 0, max = VA_Get_Size(files); i < max; i++) {
-        CharBuf *entry = (CharBuf*)VA_Fetch(files, i);
+        String *entry = (String*)VA_Fetch(files, i);
 
         if (Seg_valid_seg_name(entry)) {
             num_segs++;
         }
-        else if (CB_Starts_With_Str(entry, "schema_", 7)
-                 && CB_Ends_With_Str(entry, ".json", 5)
+        else if (Str_Starts_With_Str(entry, "schema_", 7)
+                 && Str_Ends_With_Str(entry, ".json", 5)
                 ) {
             uint64_t gen = IxFileNames_extract_gen(entry);
             if (gen > latest_schema_gen) {
@@ -262,7 +262,7 @@ S_try_open_elements(void *context) {
             schema_file = NULL;
         }
         else {
-            CharBuf *mess = MAKE_MESS("Failed to parse %o", schema_file);
+            String *mess = MAKE_MESS("Failed to parse %o", schema_file);
             DECREF(files);
             Err_throw_mess(ERR, mess);
         }
@@ -270,7 +270,7 @@ S_try_open_elements(void *context) {
 
     VArray *segments = VA_new(num_segs);
     for (uint32_t i = 0, max = VA_Get_Size(files); i < max; i++) {
-        CharBuf *entry = (CharBuf*)VA_Fetch(files, i);
+        String *entry = (String*)VA_Fetch(files, i);
 
         // Create a Segment for each segmeta.
         if (Seg_valid_seg_name(entry)) {
@@ -284,7 +284,7 @@ S_try_open_elements(void *context) {
                 VA_Push(segments, (Obj*)segment);
             }
             else {
-                CharBuf *mess = MAKE_MESS("Failed to read %o", entry);
+                String *mess = MAKE_MESS("Failed to read %o", entry);
                 DECREF(segment);
                 DECREF(segments);
                 DECREF(files);
@@ -325,7 +325,7 @@ S_try_open_elements(void *context) {
 }
 
 // For test suite.
-CharBuf* PolyReader_race_condition_debug1 = NULL;
+String* PolyReader_race_condition_debug1 = NULL;
 int32_t  PolyReader_debug1_num_passes     = 0;
 
 PolyReader*
@@ -346,7 +346,7 @@ PolyReader_do_open(PolyReader *self, Obj *index, Snapshot *snapshot,
     }
 
     while (1) {
-        CharBuf *target_snap_file;
+        String *target_snap_file;
 
         // If a Snapshot was supplied, use its file.
         if (snapshot) {
@@ -355,7 +355,7 @@ PolyReader_do_open(PolyReader *self, Obj *index, Snapshot *snapshot,
                 THROW(ERR, "Supplied snapshot objects must not be empty");
             }
             else {
-                CB_Inc_RefCount(target_snap_file);
+                Str_Inc_RefCount(target_snap_file);
             }
         }
         else {
@@ -382,8 +382,8 @@ PolyReader_do_open(PolyReader *self, Obj *index, Snapshot *snapshot,
         // Testing only.
         if (PolyReader_race_condition_debug1) {
             StackString *temp = SSTR_WRAP_STR("temp", 4);
-            if (Folder_Exists(folder, (CharBuf*)temp)) {
-                bool success = Folder_Rename(folder, (CharBuf*)temp,
+            if (Folder_Exists(folder, (String*)temp)) {
+                bool success = Folder_Rename(folder, (String*)temp,
                                                PolyReader_race_condition_debug1);
                 if (!success) { RETHROW(INCREF(Err_get_error())); }
             }
@@ -455,8 +455,8 @@ S_derive_folder(Obj *index) {
     if (Obj_Is_A(index, FOLDER)) {
         folder = (Folder*)INCREF(index);
     }
-    else if (Obj_Is_A(index, CHARBUF)) {
-        folder = (Folder*)FSFolder_new((CharBuf*)index);
+    else if (Obj_Is_A(index, STRING)) {
+        folder = (Folder*)FSFolder_new((String*)index);
     }
     else {
         THROW(ERR, "Invalid type for 'index': %o", Obj_Get_Class_Name(index));
@@ -478,7 +478,7 @@ S_obtain_deletion_lock(PolyReader *self) {
 }
 
 static bool
-S_obtain_read_lock(PolyReader *self, const CharBuf *snapshot_file_name) {
+S_obtain_read_lock(PolyReader *self, const String *snapshot_file_name) {
     PolyReaderIVARS *const ivars = PolyReader_IVARS(self);
     ivars->read_lock = IxManager_Make_Snapshot_Read_Lock(ivars->manager,
                                                          snapshot_file_name);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/PolyReader.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PolyReader.cfh b/core/Lucy/Index/PolyReader.cfh
index 0f7439e..29f6088 100644
--- a/core/Lucy/Index/PolyReader.cfh
+++ b/core/Lucy/Index/PolyReader.cfh
@@ -56,7 +56,7 @@ public class Lucy::Index::PolyReader inherits Lucy::Index::IndexReader {
          Snapshot *snapshot = NULL, IndexManager *manager = NULL,
          VArray *sub_readers = NULL);
 
-    inert CharBuf* race_condition_debug1;
+    inert String* race_condition_debug1;
     inert int32_t  debug1_num_passes;
 
     /** Determine which sub-reader a document id belongs to.

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Posting.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Posting.cfh b/core/Lucy/Index/Posting.cfh
index 5449c16..ac081fa 100644
--- a/core/Lucy/Index/Posting.cfh
+++ b/core/Lucy/Index/Posting.cfh
@@ -40,7 +40,7 @@ class Lucy::Index::Posting cnick Post inherits Lucy::Util::Stepper {
      */
     abstract incremented RawPosting*
     Read_Raw(Posting *self, InStream *instream, int32_t last_doc_id,
-             CharBuf *term_text, MemoryPool *mem_pool);
+             String *term_text, MemoryPool *mem_pool);
 
     /** Process an Inversion into RawPosting objects and add them all to the
      * supplied PostingPool.

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Posting/MatchPosting.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Posting/MatchPosting.c b/core/Lucy/Index/Posting/MatchPosting.c
index 26f119d..c794660 100644
--- a/core/Lucy/Index/Posting/MatchPosting.c
+++ b/core/Lucy/Index/Posting/MatchPosting.c
@@ -96,10 +96,10 @@ MatchPost_Read_Record_IMP(MatchPosting *self, InStream *instream) {
 
 RawPosting*
 MatchPost_Read_Raw_IMP(MatchPosting *self, InStream *instream,
-                       int32_t last_doc_id, CharBuf *term_text,
+                       int32_t last_doc_id, String *term_text,
                        MemoryPool *mem_pool) {
-    char *const    text_buf  = (char*)CB_Get_Ptr8(term_text);
-    const size_t   text_size = CB_Get_Size(term_text);
+    char *const    text_buf  = (char*)Str_Get_Ptr8(term_text);
+    const size_t   text_size = Str_Get_Size(term_text);
     const uint32_t doc_code  = InStream_Read_C32(instream);
     const uint32_t delta_doc = doc_code >> 1;
     const int32_t  doc_id    = last_doc_id + delta_doc;
@@ -183,8 +183,8 @@ MatchPostWriter_init(MatchPostingWriter *self, Schema *schema,
                      Snapshot *snapshot, Segment *segment,
                      PolyReader *polyreader, int32_t field_num) {
     Folder  *folder = PolyReader_Get_Folder(polyreader);
-    CharBuf *filename
-        = CB_newf("%o/postings-%i32.dat", Seg_Get_Name(segment), field_num);
+    String *filename
+        = Str_newf("%o/postings-%i32.dat", Seg_Get_Name(segment), field_num);
     PostWriter_init((PostingWriter*)self, schema, snapshot, segment,
                     polyreader, field_num);
     MatchPostingWriterIVARS *const ivars = MatchPostWriter_IVARS(self);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Posting/MatchPosting.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Posting/MatchPosting.cfh b/core/Lucy/Index/Posting/MatchPosting.cfh
index 6c094ec..dcedd14 100644
--- a/core/Lucy/Index/Posting/MatchPosting.cfh
+++ b/core/Lucy/Index/Posting/MatchPosting.cfh
@@ -46,7 +46,7 @@ class Lucy::Index::Posting::MatchPosting cnick MatchPost
 
     incremented RawPosting*
     Read_Raw(MatchPosting *self, InStream *instream, int32_t last_doc_id,
-             CharBuf *term_text, MemoryPool *mem_pool);
+             String *term_text, MemoryPool *mem_pool);
 
     void
     Add_Inversion_To_Pool(MatchPosting *self, PostingPool *post_pool,

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Posting/RichPosting.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Posting/RichPosting.c b/core/Lucy/Index/Posting/RichPosting.c
index 969d547..e50eb48 100644
--- a/core/Lucy/Index/Posting/RichPosting.c
+++ b/core/Lucy/Index/Posting/RichPosting.c
@@ -153,10 +153,10 @@ RichPost_Add_Inversion_To_Pool_IMP(RichPosting *self, PostingPool *post_pool,
 
 RawPosting*
 RichPost_Read_Raw_IMP(RichPosting *self, InStream *instream,
-                      int32_t last_doc_id, CharBuf *term_text,
+                      int32_t last_doc_id, String *term_text,
                       MemoryPool *mem_pool) {
-    char *const    text_buf       = (char*)CB_Get_Ptr8(term_text);
-    const size_t   text_size      = CB_Get_Size(term_text);
+    char *const    text_buf       = (char*)Str_Get_Ptr8(term_text);
+    const size_t   text_size      = Str_Get_Size(term_text);
     const uint32_t doc_code       = InStream_Read_C32(instream);
     const uint32_t delta_doc      = doc_code >> 1;
     const int32_t  doc_id         = last_doc_id + delta_doc;

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Posting/RichPosting.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Posting/RichPosting.cfh b/core/Lucy/Index/Posting/RichPosting.cfh
index dcdcbb5..34e9035 100644
--- a/core/Lucy/Index/Posting/RichPosting.cfh
+++ b/core/Lucy/Index/Posting/RichPosting.cfh
@@ -47,7 +47,7 @@ class Lucy::Index::Posting::RichPosting cnick RichPost
 
     incremented RawPosting*
     Read_Raw(RichPosting *self, InStream *instream, int32_t last_doc_id,
-             CharBuf *term_text, MemoryPool *mem_pool);
+             String *term_text, MemoryPool *mem_pool);
 
     void
     Add_Inversion_To_Pool(RichPosting *self, PostingPool *post_pool,

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Posting/ScorePosting.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Posting/ScorePosting.c b/core/Lucy/Index/Posting/ScorePosting.c
index 5b97a15..321065f 100644
--- a/core/Lucy/Index/Posting/ScorePosting.c
+++ b/core/Lucy/Index/Posting/ScorePosting.c
@@ -172,10 +172,10 @@ ScorePost_Read_Record_IMP(ScorePosting *self, InStream *instream) {
 
 RawPosting*
 ScorePost_Read_Raw_IMP(ScorePosting *self, InStream *instream,
-                       int32_t last_doc_id, CharBuf *term_text,
+                       int32_t last_doc_id, String *term_text,
                        MemoryPool *mem_pool) {
-    char *const    text_buf       = (char*)CB_Get_Ptr8(term_text);
-    const size_t   text_size      = CB_Get_Size(term_text);
+    char *const    text_buf       = (char*)Str_Get_Ptr8(term_text);
+    const size_t   text_size      = Str_Get_Size(term_text);
     const uint32_t doc_code       = InStream_Read_C32(instream);
     const uint32_t delta_doc      = doc_code >> 1;
     const int32_t  doc_id         = last_doc_id + delta_doc;

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Posting/ScorePosting.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Posting/ScorePosting.cfh b/core/Lucy/Index/Posting/ScorePosting.cfh
index 18e313e..754123f 100644
--- a/core/Lucy/Index/Posting/ScorePosting.cfh
+++ b/core/Lucy/Index/Posting/ScorePosting.cfh
@@ -44,7 +44,7 @@ class Lucy::Index::Posting::ScorePosting cnick ScorePost
 
     incremented RawPosting*
     Read_Raw(ScorePosting *self, InStream *instream, int32_t last_doc_id,
-             CharBuf *term_text, MemoryPool *mem_pool);
+             String *term_text, MemoryPool *mem_pool);
 
     void
     Add_Inversion_To_Pool(ScorePosting *self, PostingPool *post_pool,

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/PostingList.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PostingList.cfh b/core/Lucy/Index/PostingList.cfh
index 36dc026..ac7d834 100644
--- a/core/Lucy/Index/PostingList.cfh
+++ b/core/Lucy/Index/PostingList.cfh
@@ -63,7 +63,7 @@ public class Lucy::Index::PostingList cnick PList
     /** Indexing helper function.
      */
     abstract RawPosting*
-    Read_Raw(PostingList *self, int32_t last_doc_id, CharBuf *term_text,
+    Read_Raw(PostingList *self, int32_t last_doc_id, String *term_text,
              MemoryPool *mem_pool);
 }
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/PostingListReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PostingListReader.c b/core/Lucy/Index/PostingListReader.c
index 89c4712..02ae03d 100644
--- a/core/Lucy/Index/PostingListReader.c
+++ b/core/Lucy/Index/PostingListReader.c
@@ -109,7 +109,7 @@ DefPListReader_Destroy_IMP(DefaultPostingListReader *self) {
 
 SegPostingList*
 DefPListReader_Posting_List_IMP(DefaultPostingListReader *self,
-                                const CharBuf *field, Obj *target) {
+                                const String *field, Obj *target) {
     DefaultPostingListReaderIVARS *const ivars = DefPListReader_IVARS(self);
     FieldType *type = Schema_Fetch_Type(ivars->schema, field);
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/PostingListReader.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PostingListReader.cfh b/core/Lucy/Index/PostingListReader.cfh
index b6d7455..8157af6 100644
--- a/core/Lucy/Index/PostingListReader.cfh
+++ b/core/Lucy/Index/PostingListReader.cfh
@@ -37,7 +37,7 @@ public class Lucy::Index::PostingListReader cnick PListReader
      * term using Seek().
      */
     public abstract incremented nullable PostingList*
-    Posting_List(PostingListReader *self, const CharBuf *field = NULL,
+    Posting_List(PostingListReader *self, const String *field = NULL,
                  Obj *term = NULL);
 
     abstract LexiconReader*
@@ -65,7 +65,7 @@ class Lucy::Index::DefaultPostingListReader cnick DefPListReader
          LexiconReader *lex_reader);
 
     public incremented nullable SegPostingList*
-    Posting_List(DefaultPostingListReader *self, const CharBuf *field = NULL,
+    Posting_List(DefaultPostingListReader *self, const String *field = NULL,
                  Obj *term = NULL);
 
     LexiconReader*

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/PostingListWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PostingListWriter.c b/core/Lucy/Index/PostingListWriter.c
index 691255b..8685b77 100644
--- a/core/Lucy/Index/PostingListWriter.c
+++ b/core/Lucy/Index/PostingListWriter.c
@@ -82,10 +82,10 @@ S_lazy_init(PostingListWriter *self) {
     PostingListWriterIVARS *const ivars = PListWriter_IVARS(self);
     if (!ivars->lex_temp_out) {
         Folder  *folder         = ivars->folder;
-        CharBuf *seg_name       = Seg_Get_Name(ivars->segment);
-        CharBuf *lex_temp_path  = CB_newf("%o/lextemp", seg_name);
-        CharBuf *post_temp_path = CB_newf("%o/ptemp", seg_name);
-        CharBuf *skip_path      = CB_newf("%o/postings.skip", seg_name);
+        String *seg_name       = Seg_Get_Name(ivars->segment);
+        String *lex_temp_path  = Str_newf("%o/lextemp", seg_name);
+        String *post_temp_path = Str_newf("%o/ptemp", seg_name);
+        String *skip_path      = Str_newf("%o/postings.skip", seg_name);
 
         // Open temp streams and final skip stream.
         ivars->lex_temp_out  = Folder_Open_Out(folder, lex_temp_path);
@@ -106,7 +106,7 @@ S_lazy_init_posting_pool(PostingListWriter *self, int32_t field_num) {
     PostingListWriterIVARS *const ivars = PListWriter_IVARS(self);
     PostingPool *pool = (PostingPool*)VA_Fetch(ivars->pools, field_num);
     if (!pool && field_num != 0) {
-        CharBuf *field = Seg_Field_Name(ivars->segment, field_num);
+        String *field = Seg_Field_Name(ivars->segment, field_num);
         pool = PostPool_new(ivars->schema, ivars->snapshot, ivars->segment,
                             ivars->polyreader, field, ivars->lex_writer,
                             ivars->mem_pool, ivars->lex_temp_out,
@@ -186,7 +186,7 @@ PListWriter_Add_Segment_IMP(PostingListWriter *self, SegReader *reader,
     S_lazy_init(self);
 
     for (uint32_t i = 0, max = VA_Get_Size(all_fields); i < max; i++) {
-        CharBuf   *field = (CharBuf*)VA_Fetch(all_fields, i);
+        String    *field = (String*)VA_Fetch(all_fields, i);
         FieldType *type  = Schema_Fetch_Type(schema, field);
         int32_t old_field_num = Seg_Field_Num(other_segment, field);
         int32_t new_field_num = Seg_Field_Num(segment, field);
@@ -214,9 +214,9 @@ PListWriter_Finish_IMP(PostingListWriter *self) {
     if (!ivars->lex_temp_out) { return; }
 
     Folder  *folder = ivars->folder;
-    CharBuf *seg_name = Seg_Get_Name(ivars->segment);
-    CharBuf *lex_temp_path  = CB_newf("%o/lextemp", seg_name);
-    CharBuf *post_temp_path = CB_newf("%o/ptemp", seg_name);
+    String *seg_name = Seg_Get_Name(ivars->segment);
+    String *lex_temp_path  = Str_newf("%o/lextemp", seg_name);
+    String *post_temp_path = Str_newf("%o/ptemp", seg_name);
 
     // Close temp streams.
     OutStream_Close(ivars->lex_temp_out);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/PostingPool.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PostingPool.c b/core/Lucy/Index/PostingPool.c
index 70f4de4..40bb9fc 100644
--- a/core/Lucy/Index/PostingPool.c
+++ b/core/Lucy/Index/PostingPool.c
@@ -58,7 +58,7 @@ S_write_terms_and_postings(PostingPool *self, PostingWriter *post_writer,
 
 PostingPool*
 PostPool_new(Schema *schema, Snapshot *snapshot, Segment *segment,
-             PolyReader *polyreader,  const CharBuf *field,
+             PolyReader *polyreader,  const String *field,
              LexiconWriter *lex_writer, MemoryPool *mem_pool,
              OutStream *lex_temp_out, OutStream *post_temp_out,
              OutStream *skip_out) {
@@ -70,7 +70,7 @@ PostPool_new(Schema *schema, Snapshot *snapshot, Segment *segment,
 
 PostingPool*
 PostPool_init(PostingPool *self, Schema *schema, Snapshot *snapshot,
-              Segment *segment, PolyReader *polyreader, const CharBuf *field,
+              Segment *segment, PolyReader *polyreader, const String *field,
               LexiconWriter *lex_writer, MemoryPool *mem_pool,
               OutStream *lex_temp_out, OutStream *post_temp_out,
               OutStream *skip_out) {
@@ -98,7 +98,7 @@ PostPool_init(PostingPool *self, Schema *schema, Snapshot *snapshot,
     ivars->polyreader     = (PolyReader*)INCREF(polyreader);
     ivars->lex_writer     = (LexiconWriter*)INCREF(lex_writer);
     ivars->mem_pool       = (MemoryPool*)INCREF(mem_pool);
-    ivars->field          = CB_Clone(field);
+    ivars->field          = Str_Clone(field);
     ivars->lex_temp_out   = (OutStream*)INCREF(lex_temp_out);
     ivars->post_temp_out  = (OutStream*)INCREF(post_temp_out);
     ivars->skip_out       = (OutStream*)INCREF(skip_out);
@@ -174,9 +174,9 @@ PostPool_Flip_IMP(PostingPool *self) {
 
     if (num_runs) {
         Folder  *folder = PolyReader_Get_Folder(ivars->polyreader);
-        CharBuf *seg_name = Seg_Get_Name(ivars->segment);
-        CharBuf *lex_temp_path  = CB_newf("%o/lextemp", seg_name);
-        CharBuf *post_temp_path = CB_newf("%o/ptemp", seg_name);
+        String *seg_name = Seg_Get_Name(ivars->segment);
+        String *lex_temp_path  = Str_newf("%o/lextemp", seg_name);
+        String *post_temp_path = Str_newf("%o/ptemp", seg_name);
         ivars->lex_temp_in = Folder_Open_In(folder, lex_temp_path);
         if (!ivars->lex_temp_in) {
             RETHROW(INCREF(Err_get_error()));
@@ -363,7 +363,7 @@ S_write_terms_and_postings(PostingPool *self, PostingWriter *post_writer,
     TermInfo      *const skip_tinfo       = TInfo_new(0);
     TermInfoIVARS *const tinfo_ivars      = TInfo_IVARS(tinfo);
     TermInfoIVARS *const skip_tinfo_ivars = TInfo_IVARS(skip_tinfo);
-    CharBuf       *const last_term_text   = CB_new(0);
+    String        *const last_term_text   = Str_new(0);
     LexiconWriter *const lex_writer       = ivars->lex_writer;
     SkipStepper   *const skip_stepper     = ivars->skip_stepper;
     SkipStepperIVARS *const skip_stepper_ivars
@@ -378,9 +378,9 @@ S_write_terms_and_postings(PostingPool *self, PostingWriter *post_writer,
                               (*(RawPosting**)PostPool_Fetch(self)),
                               RAWPOSTING);
     RawPostingIVARS *post_ivars = RawPost_IVARS(posting);
-    CB_Mimic_Str(last_term_text, post_ivars->blob, post_ivars->content_len);
-    char *last_text_buf = (char*)CB_Get_Ptr8(last_term_text);
-    uint32_t last_text_size = CB_Get_Size(last_term_text);
+    Str_Mimic_Str(last_term_text, post_ivars->blob, post_ivars->content_len);
+    char *last_text_buf = (char*)Str_Get_Ptr8(last_term_text);
+    uint32_t last_text_size = Str_Get_Size(last_term_text);
     SkipStepper_Set_ID_And_Filepos(skip_stepper, 0, 0);
 
     // Initialize sentinel to be used on the last iter, using an empty string
@@ -426,10 +426,10 @@ S_write_terms_and_postings(PostingPool *self, PostingWriter *post_writer,
             last_skip_filepos     = tinfo_ivars->post_filepos;
 
             // Remember the term_text so we can write string diffs.
-            CB_Mimic_Str(last_term_text, post_ivars->blob,
+            Str_Mimic_Str(last_term_text, post_ivars->blob,
                          post_ivars->content_len);
-            last_text_buf  = (char*)CB_Get_Ptr8(last_term_text);
-            last_text_size = CB_Get_Size(last_term_text);
+            last_text_buf  = (char*)Str_Get_Ptr8(last_term_text);
+            last_text_size = Str_Get_Size(last_term_text);
         }
 
         // Bail on last iter before writing invalid posting data.
@@ -486,10 +486,10 @@ PostPool_Refill_IMP(PostingPool *self) {
     const uint32_t     mem_thresh  = ivars->mem_thresh;
     const int32_t      doc_base    = ivars->doc_base;
     uint32_t           num_elems   = 0; // number of items recovered
-    CharBuf           *term_text   = NULL;
+    String            *term_text   = NULL;
 
     if (ivars->lexicon == NULL) { return 0; }
-    else { term_text = (CharBuf*)Lex_Get_Term(lexicon); }
+    else { term_text = (String*)Lex_Get_Term(lexicon); }
 
     // Make sure cache is empty.
     if (ivars->cache_max - ivars->cache_tick > 0) {
@@ -511,9 +511,9 @@ PostPool_Refill_IMP(PostingPool *self) {
             // Read a term.
             if (Lex_Next(lexicon)) {
                 ivars->post_count = Lex_Doc_Freq(lexicon);
-                term_text = (CharBuf*)Lex_Get_Term(lexicon);
-                if (term_text && !Obj_Is_A((Obj*)term_text, CHARBUF)) {
-                    THROW(ERR, "Only CharBuf terms are supported for now");
+                term_text = (String*)Lex_Get_Term(lexicon);
+                if (term_text && !Obj_Is_A((Obj*)term_text, STRING)) {
+                    THROW(ERR, "Only String terms are supported for now");
                 }
                 Posting *posting = PList_Get_Posting(plist);
                 Post_Set_Doc_ID(posting, doc_base);
@@ -587,7 +587,7 @@ S_fresh_flip(PostingPool *self, InStream *lex_temp_in,
     if (ivars->lex_end == 0) { return; }
 
     // Get a Lexicon.
-    CharBuf *lex_alias = CB_newf("%o-%i64-to-%i64",
+    String *lex_alias = Str_newf("%o-%i64-to-%i64",
                                  InStream_Get_Filename(lex_temp_in),
                                  ivars->lex_start, ivars->lex_end);
     InStream *lex_temp_in_dupe = InStream_Reopen(
@@ -600,8 +600,8 @@ S_fresh_flip(PostingPool *self, InStream *lex_temp_in,
     DECREF(lex_temp_in_dupe);
 
     // Get a PostingList.
-    CharBuf *post_alias
-        = CB_newf("%o-%i64-to-%i64", InStream_Get_Filename(post_temp_in),
+    String *post_alias
+        = Str_newf("%o-%i64-to-%i64", InStream_Get_Filename(post_temp_in),
                   ivars->post_start, ivars->post_end);
     InStream *post_temp_in_dupe
         = InStream_Reopen(post_temp_in, post_alias, ivars->post_start,

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/PostingPool.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/PostingPool.cfh b/core/Lucy/Index/PostingPool.cfh
index b4f2614..570ec15 100644
--- a/core/Lucy/Index/PostingPool.cfh
+++ b/core/Lucy/Index/PostingPool.cfh
@@ -26,7 +26,7 @@ class Lucy::Index::PostingPool cnick PostPool
     Snapshot          *snapshot;
     Segment           *segment;
     PolyReader        *polyreader;
-    CharBuf           *field;
+    String            *field;
     LexiconWriter     *lex_writer;
     Lexicon           *lexicon;
     PostingList       *plist;
@@ -51,14 +51,14 @@ class Lucy::Index::PostingPool cnick PostPool
 
     inert incremented PostingPool*
     new(Schema *schema, Snapshot *snapshot, Segment *segment,
-        PolyReader *polyreader, const CharBuf *field,
+        PolyReader *polyreader, const String *field,
         LexiconWriter *lex_writer, MemoryPool *mem_pool,
         OutStream *lex_temp_out, OutStream *post_temp_out,
         OutStream *skip_out);
 
     inert PostingPool*
     init(PostingPool *self, Schema *schema, Snapshot *snapshot,
-         Segment *segment, PolyReader *polyreader, const CharBuf *field,
+         Segment *segment, PolyReader *polyreader, const String *field,
          LexiconWriter *lex_writer, MemoryPool *mem_pool,
          OutStream *lex_temp_out, OutStream *post_temp_out,
          OutStream *skip_out);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/RawLexicon.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/RawLexicon.c b/core/Lucy/Index/RawLexicon.c
index 242ba99..66b4336 100644
--- a/core/Lucy/Index/RawLexicon.c
+++ b/core/Lucy/Index/RawLexicon.c
@@ -26,14 +26,14 @@
 #include "Lucy/Store/InStream.h"
 
 RawLexicon*
-RawLex_new(Schema *schema, const CharBuf *field, InStream *instream,
+RawLex_new(Schema *schema, const String *field, InStream *instream,
            int64_t start, int64_t end) {
     RawLexicon *self = (RawLexicon*)VTable_Make_Obj(RAWLEXICON);
     return RawLex_init(self, schema, field, instream, start, end);
 }
 
 RawLexicon*
-RawLex_init(RawLexicon *self, Schema *schema, const CharBuf *field,
+RawLex_init(RawLexicon *self, Schema *schema, const String *field,
             InStream *instream, int64_t start, int64_t end) {
     FieldType *type = Schema_Fetch_Type(schema, field);
     Lex_init((Lexicon*)self, field);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/RawLexicon.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/RawLexicon.cfh b/core/Lucy/Index/RawLexicon.cfh
index 30d11cb..59b5f36 100644
--- a/core/Lucy/Index/RawLexicon.cfh
+++ b/core/Lucy/Index/RawLexicon.cfh
@@ -29,11 +29,11 @@ class Lucy::Index::RawLexicon cnick RawLex
     int64_t        len;
 
     inert incremented RawLexicon*
-    new(Schema *schema, const CharBuf *field, InStream *instream,
+    new(Schema *schema, const String *field, InStream *instream,
         int64_t start, int64_t end);
 
     inert RawLexicon*
-    init(RawLexicon *self, Schema *schema, const CharBuf *field,
+    init(RawLexicon *self, Schema *schema, const String *field,
          InStream *instream, int64_t start, int64_t end);
 
     public void

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/RawPostingList.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/RawPostingList.c b/core/Lucy/Index/RawPostingList.c
index 0620a2d..8518893 100644
--- a/core/Lucy/Index/RawPostingList.c
+++ b/core/Lucy/Index/RawPostingList.c
@@ -26,14 +26,14 @@
 #include "Lucy/Util/MemoryPool.h"
 
 RawPostingList*
-RawPList_new(Schema *schema, const CharBuf *field, InStream *instream,
+RawPList_new(Schema *schema, const String *field, InStream *instream,
              int64_t start, int64_t end) {
     RawPostingList *self = (RawPostingList*)VTable_Make_Obj(RAWPOSTINGLIST);
     return RawPList_init(self, schema, field, instream, start, end);
 }
 
 RawPostingList*
-RawPList_init(RawPostingList *self, Schema *schema, const CharBuf *field,
+RawPList_init(RawPostingList *self, Schema *schema, const String *field,
               InStream *instream, int64_t start, int64_t end) {
     PList_init((PostingList*)self);
     RawPostingListIVARS *const ivars = RawPList_IVARS(self);
@@ -62,7 +62,7 @@ RawPList_Get_Posting_IMP(RawPostingList *self) {
 
 RawPosting*
 RawPList_Read_Raw_IMP(RawPostingList *self, int32_t last_doc_id,
-                      CharBuf *term_text, MemoryPool *mem_pool) {
+                      String *term_text, MemoryPool *mem_pool) {
     RawPostingListIVARS *const ivars = RawPList_IVARS(self);
     return Post_Read_Raw(ivars->posting, ivars->instream,
                          last_doc_id, term_text, mem_pool);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/RawPostingList.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/RawPostingList.cfh b/core/Lucy/Index/RawPostingList.cfh
index 423a36c..37da0e9 100644
--- a/core/Lucy/Index/RawPostingList.cfh
+++ b/core/Lucy/Index/RawPostingList.cfh
@@ -26,18 +26,18 @@ class Lucy::Index::RawPostingList cnick RawPList
     int64_t        len;
 
     inert incremented RawPostingList*
-    new(Schema *schema, const CharBuf *field, InStream *instream,
+    new(Schema *schema, const String *field, InStream *instream,
         int64_t start, int64_t end);
 
     inert RawPostingList*
-    init(RawPostingList *self, Schema *schema, const CharBuf *field,
+    init(RawPostingList *self, Schema *schema, const String *field,
          InStream *instream, int64_t lex_start, int64_t lex_end);
 
     public void
     Destroy(RawPostingList *self);
 
     RawPosting*
-    Read_Raw(RawPostingList *self, int32_t last_doc_id, CharBuf *term_text,
+    Read_Raw(RawPostingList *self, int32_t last_doc_id, String *term_text,
              MemoryPool *mem_pool);
 
     Posting*

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SegLexicon.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegLexicon.c b/core/Lucy/Index/SegLexicon.c
index c3dec6f..ab52dc2 100644
--- a/core/Lucy/Index/SegLexicon.c
+++ b/core/Lucy/Index/SegLexicon.c
@@ -38,24 +38,24 @@ S_scan_to(SegLexicon *self, Obj *target);
 
 SegLexicon*
 SegLex_new(Schema *schema, Folder *folder, Segment *segment,
-           const CharBuf *field) {
+           const String *field) {
     SegLexicon *self = (SegLexicon*)VTable_Make_Obj(SEGLEXICON);
     return SegLex_init(self, schema, folder, segment, field);
 }
 
 SegLexicon*
 SegLex_init(SegLexicon *self, Schema *schema, Folder *folder,
-            Segment *segment, const CharBuf *field) {
+            Segment *segment, const String *field) {
     Hash *metadata = (Hash*)CERTIFY(
                          Seg_Fetch_Metadata_Str(segment, "lexicon", 7),
                          HASH);
     Architecture *arch      = Schema_Get_Architecture(schema);
     Hash         *counts    = (Hash*)Hash_Fetch_Str(metadata, "counts", 6);
     Obj          *format    = Hash_Fetch_Str(metadata, "format", 6);
-    CharBuf      *seg_name  = Seg_Get_Name(segment);
+    String       *seg_name  = Seg_Get_Name(segment);
     int32_t       field_num = Seg_Field_Num(segment, field);
     FieldType    *type      = Schema_Fetch_Type(schema, field);
-    CharBuf *filename = CB_newf("%o/lexicon-%i32.dat", seg_name, field_num);
+    String *filename = Str_newf("%o/lexicon-%i32.dat", seg_name, field_num);
 
     Lex_init((Lexicon*)self, field);
     SegLexiconIVARS *const ivars = SegLex_IVARS(self);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SegLexicon.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegLexicon.cfh b/core/Lucy/Index/SegLexicon.cfh
index fe7e68a..c710735 100644
--- a/core/Lucy/Index/SegLexicon.cfh
+++ b/core/Lucy/Index/SegLexicon.cfh
@@ -41,11 +41,11 @@ class Lucy::Index::SegLexicon cnick SegLex
      */
     inert incremented SegLexicon*
     new(Schema *schema, Folder *folder, Segment *segment,
-        const CharBuf *field);
+        const String *field);
 
     inert SegLexicon*
     init(SegLexicon *self, Schema *schema, Folder *folder, Segment *segment,
-         const CharBuf *field);
+         const String *field);
 
     nullable TermInfo*
     Get_Term_Info(SegLexicon *self);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SegPostingList.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegPostingList.c b/core/Lucy/Index/SegPostingList.c
index ae07958..71b344d 100644
--- a/core/Lucy/Index/SegPostingList.c
+++ b/core/Lucy/Index/SegPostingList.c
@@ -43,24 +43,24 @@ static void
 S_seek_tinfo(SegPostingList *self, TermInfo *tinfo);
 
 SegPostingList*
-SegPList_new(PostingListReader *plist_reader, const CharBuf *field) {
+SegPList_new(PostingListReader *plist_reader, const String *field) {
     SegPostingList *self = (SegPostingList*)VTable_Make_Obj(SEGPOSTINGLIST);
     return SegPList_init(self, plist_reader, field);
 }
 
 SegPostingList*
 SegPList_init(SegPostingList *self, PostingListReader *plist_reader,
-              const CharBuf *field) {
+              const String *field) {
     SegPostingListIVARS *const ivars = SegPList_IVARS(self);
     Schema       *const schema   = PListReader_Get_Schema(plist_reader);
     Folder       *const folder   = PListReader_Get_Folder(plist_reader);
     Segment      *const segment  = PListReader_Get_Segment(plist_reader);
     Architecture *const arch     = Schema_Get_Architecture(schema);
-    CharBuf      *const seg_name = Seg_Get_Name(segment);
+    String       *const seg_name = Seg_Get_Name(segment);
     int32_t       field_num      = Seg_Field_Num(segment, field);
-    CharBuf      *post_file      = CB_newf("%o/postings-%i32.dat",
+    String       *post_file      = Str_newf("%o/postings-%i32.dat",
                                            seg_name, field_num);
-    CharBuf      *skip_file      = CB_newf("%o/postings.skip", seg_name);
+    String       *skip_file      = Str_newf("%o/postings.skip", seg_name);
 
     // Init.
     ivars->doc_freq        = 0;
@@ -73,7 +73,7 @@ SegPList_init(SegPostingList *self, PostingListReader *plist_reader,
 
     // Assign.
     ivars->plist_reader    = (PostingListReader*)INCREF(plist_reader);
-    ivars->field           = CB_Clone(field);
+    ivars->field           = Str_Clone(field);
     ivars->skip_interval   = Arch_Skip_Interval(arch);
 
     // Derive.
@@ -310,7 +310,7 @@ SegPList_Make_Matcher_IMP(SegPostingList *self, Similarity *sim,
 
 RawPosting*
 SegPList_Read_Raw_IMP(SegPostingList *self, int32_t last_doc_id,
-                      CharBuf *term_text, MemoryPool *mem_pool) {
+                      String *term_text, MemoryPool *mem_pool) {
     SegPostingListIVARS *const ivars = SegPList_IVARS(self);
     return Post_Read_Raw(ivars->posting, ivars->post_stream,
                          last_doc_id, term_text, mem_pool);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SegPostingList.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegPostingList.cfh b/core/Lucy/Index/SegPostingList.cfh
index dfb23ea..0213dde 100644
--- a/core/Lucy/Index/SegPostingList.cfh
+++ b/core/Lucy/Index/SegPostingList.cfh
@@ -23,7 +23,7 @@ class Lucy::Index::SegPostingList cnick SegPList
     inherits Lucy::Index::PostingList {
 
     PostingListReader *plist_reader;
-    CharBuf           *field;
+    String            *field;
     Posting           *posting;
     InStream          *post_stream;
     InStream          *skip_stream;
@@ -36,11 +36,11 @@ class Lucy::Index::SegPostingList cnick SegPList
     int32_t            field_num;
 
     inert incremented SegPostingList*
-    new(PostingListReader *plist_reader, const CharBuf *field);
+    new(PostingListReader *plist_reader, const String *field);
 
     inert SegPostingList*
     init(SegPostingList *self, PostingListReader *plist_reader,
-         const CharBuf *field);
+         const String *field);
 
     InStream*
     Get_Post_Stream(SegPostingList *self);
@@ -79,7 +79,7 @@ class Lucy::Index::SegPostingList cnick SegPList
                  Compiler *compiler, bool need_score);
 
     RawPosting*
-    Read_Raw(SegPostingList *self, int32_t last_doc_id, CharBuf *term_text,
+    Read_Raw(SegPostingList *self, int32_t last_doc_id, String *term_text,
              MemoryPool *mem_pool);
 }
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SegReader.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegReader.c b/core/Lucy/Index/SegReader.c
index 4332ff7..3a7db4c 100644
--- a/core/Lucy/Index/SegReader.c
+++ b/core/Lucy/Index/SegReader.c
@@ -51,7 +51,7 @@ SegReader_init(SegReader *self, Schema *schema, Folder *folder,
     segment = SegReader_Get_Segment(self);
 
     ivars->doc_max    = (int32_t)Seg_Get_Count(segment);
-    ivars->seg_name   = (CharBuf*)INCREF(Seg_Get_Name(segment));
+    ivars->seg_name   = (String*)INCREF(Seg_Get_Name(segment));
     ivars->seg_num    = Seg_Get_Number(segment);
     Err *error = Err_trap(S_try_init_components, self);
     if (error) {
@@ -84,7 +84,7 @@ SegReader_Destroy_IMP(SegReader *self) {
 }
 
 void
-SegReader_Register_IMP(SegReader *self, const CharBuf *api,
+SegReader_Register_IMP(SegReader *self, const String *api,
                        DataReader *component) {
     SegReaderIVARS *const ivars = SegReader_IVARS(self);
     if (Hash_Fetch(ivars->components, (Obj*)api)) {
@@ -94,7 +94,7 @@ SegReader_Register_IMP(SegReader *self, const CharBuf *api,
     Hash_Store(ivars->components, (Obj*)api, (Obj*)component);
 }
 
-CharBuf*
+String*
 SegReader_Get_Seg_Name_IMP(SegReader *self) {
     return SegReader_IVARS(self)->seg_name;
 }

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SegReader.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegReader.cfh b/core/Lucy/Index/SegReader.cfh
index 456aace..2e08e84 100644
--- a/core/Lucy/Index/SegReader.cfh
+++ b/core/Lucy/Index/SegReader.cfh
@@ -33,7 +33,7 @@ public class Lucy::Index::SegReader inherits Lucy::Index::IndexReader {
     int32_t  doc_max;
     int32_t  del_count;
     int64_t  seg_num;
-    CharBuf *seg_name;
+    String *seg_name;
 
     inert incremented SegReader*
     new(Schema *schema, Folder *folder, Snapshot *snapshot = NULL,
@@ -63,12 +63,12 @@ public class Lucy::Index::SegReader inherits Lucy::Index::IndexReader {
      * @param component A DataReader.
      */
     public void
-    Register(SegReader *self, const CharBuf *api,
+    Register(SegReader *self, const String *api,
              decremented DataReader *component);
 
     /** Return the name of the segment.
      */
-    public CharBuf*
+    public String*
     Get_Seg_Name(SegReader *self);
 
     /** Return the number of the segment.

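The SegReader hunks switch the cached seg_name to String without touching its ownership rules: Get_Seg_Name() hands back a borrowed reference, and SegReader_init takes its own via INCREF before stashing it. A sketch of that caller-side pattern, assuming only the accessor above and the INCREF/DECREF macros used throughout this patch; the helper name is hypothetical:

    static String*
    S_keep_seg_name(SegReader *seg_reader) {
        String *borrowed = SegReader_Get_Seg_Name(seg_reader); // borrowed; do not DECREF
        return (String*)INCREF(borrowed);                      // caller now owns a reference
    }
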
http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SegWriter.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegWriter.c b/core/Lucy/Index/SegWriter.c
index 05c9220..d06c050 100644
--- a/core/Lucy/Index/SegWriter.c
+++ b/core/Lucy/Index/SegWriter.c
@@ -61,7 +61,7 @@ SegWriter_Destroy_IMP(SegWriter *self) {
 }
 
 void
-SegWriter_Register_IMP(SegWriter *self, const CharBuf *api,
+SegWriter_Register_IMP(SegWriter *self, const String *api,
                        DataWriter *component) {
     SegWriterIVARS *const ivars = SegWriter_IVARS(self);
     CERTIFY(component, DATAWRITER);
@@ -72,7 +72,7 @@ SegWriter_Register_IMP(SegWriter *self, const CharBuf *api,
 }
 
 Obj*
-SegWriter_Fetch_IMP(SegWriter *self, const CharBuf *api) {
+SegWriter_Fetch_IMP(SegWriter *self, const String *api) {
     SegWriterIVARS *const ivars = SegWriter_IVARS(self);
     return Hash_Fetch(ivars->by_api, (Obj*)api);
 }
@@ -87,7 +87,7 @@ void
 SegWriter_Prep_Seg_Dir_IMP(SegWriter *self) {
     SegWriterIVARS *const ivars = SegWriter_IVARS(self);
     Folder  *folder   = SegWriter_Get_Folder(self);
-    CharBuf *seg_name = Seg_Get_Name(ivars->segment);
+    String *seg_name = Seg_Get_Name(ivars->segment);
 
     // Clear stale segment files from crashed indexing sessions.
     if (Folder_Exists(folder, seg_name)) {
@@ -158,7 +158,7 @@ SegWriter_Merge_Segment_IMP(SegWriter *self, SegReader *reader,
                             I32Array *doc_map) {
     SegWriterIVARS *const ivars = SegWriter_IVARS(self);
     Snapshot *snapshot = SegWriter_Get_Snapshot(self);
-    CharBuf  *seg_name = Seg_Get_Name(SegReader_Get_Segment(reader));
+    String   *seg_name = Seg_Get_Name(SegReader_Get_Segment(reader));
 
     // Have all the sub-writers merge the segment.
     for (uint32_t i = 0, max = VA_Get_Size(ivars->writers); i < max; i++) {
@@ -178,7 +178,7 @@ void
 SegWriter_Delete_Segment_IMP(SegWriter *self, SegReader *reader) {
     SegWriterIVARS *const ivars = SegWriter_IVARS(self);
     Snapshot *snapshot = SegWriter_Get_Snapshot(self);
-    CharBuf  *seg_name = Seg_Get_Name(SegReader_Get_Segment(reader));
+    String   *seg_name = Seg_Get_Name(SegReader_Get_Segment(reader));
 
     // Have all the sub-writers delete the segment.
     for (uint32_t i = 0, max = VA_Get_Size(ivars->writers); i < max; i++) {
@@ -194,7 +194,7 @@ SegWriter_Delete_Segment_IMP(SegWriter *self, SegReader *reader) {
 void
 SegWriter_Finish_IMP(SegWriter *self) {
     SegWriterIVARS *const ivars = SegWriter_IVARS(self);
-    CharBuf *seg_name = Seg_Get_Name(ivars->segment);
+    String *seg_name = Seg_Get_Name(ivars->segment);
 
     // Finish off children.
     for (uint32_t i = 0, max = VA_Get_Size(ivars->writers); i < max; i++) {
@@ -204,7 +204,7 @@ SegWriter_Finish_IMP(SegWriter *self) {
 
     // Write segment metadata and add the segment directory to the snapshot.
     Snapshot *snapshot = SegWriter_Get_Snapshot(self);
-    CharBuf *segmeta_filename = CB_newf("%o/segmeta.json", seg_name);
+    String *segmeta_filename = Str_newf("%o/segmeta.json", seg_name);
     Seg_Write_File(ivars->segment, ivars->folder);
     Snapshot_Add_Entry(snapshot, seg_name);
     DECREF(segmeta_filename);

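Several hunks above repeat one mechanical pattern for composed filenames: CB_newf becomes Str_newf, while the %o directive and the balancing DECREF are untouched. A minimal sketch of that pattern in the file's own S_-helper style, using only calls visible in this patch (Seg_Get_Name, Str_newf, Folder_Exists, DECREF) and assuming the usual Lucy headers are in scope:

    static bool
    S_segmeta_exists(Folder *folder, Segment *segment) {
        String *seg_name = Seg_Get_Name(segment);                 // borrowed from the Segment
        String *filename = Str_newf("%o/segmeta.json", seg_name); // freshly allocated String
        bool    exists   = Folder_Exists(folder, filename);
        DECREF(filename);                                         // only the composed path is owned here
        return exists;
    }
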
http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SegWriter.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SegWriter.cfh b/core/Lucy/Index/SegWriter.cfh
index e25a041..868f869 100644
--- a/core/Lucy/Index/SegWriter.cfh
+++ b/core/Lucy/Index/SegWriter.cfh
@@ -65,7 +65,7 @@ public class Lucy::Index::SegWriter inherits Lucy::Index::DataWriter {
      * @param component A DataWriter.
      */
     public void
-    Register(SegWriter *self, const CharBuf *api,
+    Register(SegWriter *self, const String *api,
              decremented DataWriter *component);
 
     /** Retrieve a registered component.
@@ -74,7 +74,7 @@ public class Lucy::Index::SegWriter inherits Lucy::Index::DataWriter {
      * implements.
      */
     public nullable Obj*
-    Fetch(SegWriter *self, const CharBuf *api);
+    Fetch(SegWriter *self, const String *api);
 
     /** Add a DataWriter to the SegWriter's stack of writers.
      */

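After the rename, SegWriter's component registry is keyed by String rather than CharBuf, but the annotations above are unchanged: Register() consumes the DataWriter (decremented) while only borrowing the api key, and Fetch() returns a borrowed, possibly NULL component. A hedged sketch of that round trip; the api name, S_swap_in_writer, and my_writer are hypothetical:

    static DataWriter*
    S_swap_in_writer(SegWriter *seg_writer, DataWriter *my_writer) {
        String *api = Str_newf("Lucy::Index::MyDataWriter");              // hypothetical api name
        SegWriter_Register(seg_writer, api, my_writer);                   // consumes my_writer
        DataWriter *got = (DataWriter*)SegWriter_Fetch(seg_writer, api);  // borrowed, or NULL
        DECREF(api);                                                      // the key was only borrowed
        return got;
    }
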
http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Segment.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Segment.c b/core/Lucy/Index/Segment.c
index 23681c4..b80bf78 100644
--- a/core/Lucy/Index/Segment.c
+++ b/core/Lucy/Index/Segment.c
@@ -45,7 +45,7 @@ Seg_init(Segment *self, int64_t number) {
     ivars->by_name   = Hash_new(0);
 
     // Start field numbers at 1, not 0.
-    VA_Push(ivars->by_num, (Obj*)CB_newf(""));
+    VA_Push(ivars->by_num, (Obj*)Str_newf(""));
 
     // Assign.
     ivars->number = number;
@@ -56,16 +56,16 @@ Seg_init(Segment *self, int64_t number) {
     return self;
 }
 
-CharBuf*
+String*
 Seg_num_to_name(int64_t number) {
     char base36[StrHelp_MAX_BASE36_BYTES];
     StrHelp_to_base36(number, &base36);
-    return CB_newf("seg_%s", &base36);
+    return Str_newf("seg_%s", &base36);
 }
 
 bool
-Seg_valid_seg_name(const CharBuf *name) {
-    if (CB_Starts_With_Str(name, "seg_", 4)) {
+Seg_valid_seg_name(const String *name) {
+    if (Str_Starts_With_Str(name, "seg_", 4)) {
         StackString *scratch = SSTR_WRAP(name);
         SStr_Nip(scratch, 4);
         uint32_t code_point;
@@ -90,7 +90,7 @@ Seg_Destroy_IMP(Segment *self) {
 bool
 Seg_Read_File_IMP(Segment *self, Folder *folder) {
     SegmentIVARS *const ivars = Seg_IVARS(self);
-    CharBuf *filename = CB_newf("%o/segmeta.json", ivars->name);
+    String *filename = Str_newf("%o/segmeta.json", ivars->name);
     Hash    *metadata = (Hash*)Json_slurp_json(folder, filename);
     Hash    *my_metadata;
 
@@ -127,7 +127,7 @@ Seg_Read_File_IMP(Segment *self, Folder *folder) {
 
     // Copy the list of fields from the source.
     for (uint32_t i = 0; i < num_fields; i++) {
-        CharBuf *name = (CharBuf*)VA_Fetch(source_by_num, i);
+        String *name = (String*)VA_Fetch(source_by_num, i);
         Seg_Add_Field(self, name);
     }
 
@@ -141,20 +141,20 @@ Seg_Write_File_IMP(Segment *self, Folder *folder) {
 
     // Store metadata specific to this Segment object.
     Hash_Store_Str(my_metadata, "count", 5,
-                   (Obj*)CB_newf("%i64", ivars->count));
-    Hash_Store_Str(my_metadata, "name", 4, (Obj*)CB_Clone(ivars->name));
+                   (Obj*)Str_newf("%i64", ivars->count));
+    Hash_Store_Str(my_metadata, "name", 4, (Obj*)Str_Clone(ivars->name));
     Hash_Store_Str(my_metadata, "field_names", 11, INCREF(ivars->by_num));
-    Hash_Store_Str(my_metadata, "format", 6, (Obj*)CB_newf("%i32", 1));
+    Hash_Store_Str(my_metadata, "format", 6, (Obj*)Str_newf("%i32", 1));
     Hash_Store_Str(ivars->metadata, "segmeta", 7, (Obj*)my_metadata);
 
-    CharBuf *filename = CB_newf("%o/segmeta.json", ivars->name);
+    String *filename = Str_newf("%o/segmeta.json", ivars->name);
     bool result = Json_spew_json((Obj*)ivars->metadata, folder, filename);
     DECREF(filename);
     if (!result) { RETHROW(INCREF(Err_get_error())); }
 }
 
 int32_t
-Seg_Add_Field_IMP(Segment *self, const CharBuf *field) {
+Seg_Add_Field_IMP(Segment *self, const String *field) {
     SegmentIVARS *const ivars = Seg_IVARS(self);
     Integer32 *num = (Integer32*)Hash_Fetch(ivars->by_name, (Obj*)field);
     if (num) {
@@ -163,12 +163,12 @@ Seg_Add_Field_IMP(Segment *self, const CharBuf *field) {
     else {
         int32_t field_num = VA_Get_Size(ivars->by_num);
         Hash_Store(ivars->by_name, (Obj*)field, (Obj*)Int32_new(field_num));
-        VA_Push(ivars->by_num, (Obj*)CB_Clone(field));
+        VA_Push(ivars->by_num, (Obj*)Str_Clone(field));
         return field_num;
     }
 }
 
-CharBuf*
+String*
 Seg_Get_Name_IMP(Segment *self) {
     return Seg_IVARS(self)->name;
 }
@@ -196,7 +196,7 @@ Seg_Increment_Count_IMP(Segment *self, int64_t increment) {
 }
 
 void
-Seg_Store_Metadata_IMP(Segment *self, const CharBuf *key, Obj *value) {
+Seg_Store_Metadata_IMP(Segment *self, const String *key, Obj *value) {
     SegmentIVARS *const ivars = Seg_IVARS(self);
     if (Hash_Fetch(ivars->metadata, (Obj*)key)) {
         THROW(ERR, "Metadata key '%o' already registered", key);
@@ -208,11 +208,11 @@ void
 Seg_Store_Metadata_Str_IMP(Segment *self, const char *key, size_t key_len,
                            Obj *value) {
     StackString *k = SSTR_WRAP_STR((char*)key, key_len);
-    Seg_Store_Metadata(self, (CharBuf*)k, value);
+    Seg_Store_Metadata(self, (String*)k, value);
 }
 
 Obj*
-Seg_Fetch_Metadata_IMP(Segment *self, const CharBuf *key) {
+Seg_Fetch_Metadata_IMP(Segment *self, const String *key) {
     SegmentIVARS *const ivars = Seg_IVARS(self);
     return Hash_Fetch(ivars->metadata, (Obj*)key);
 }
@@ -238,16 +238,16 @@ Seg_Compare_To_IMP(Segment *self, Obj *other) {
     else                                     { return 1;  }
 }
 
-CharBuf*
+String*
 Seg_Field_Name_IMP(Segment *self, int32_t field_num) {
     SegmentIVARS *const ivars = Seg_IVARS(self);
     return field_num
-           ? (CharBuf*)VA_Fetch(ivars->by_num, field_num)
+           ? (String*)VA_Fetch(ivars->by_num, field_num)
            : NULL;
 }
 
 int32_t
-Seg_Field_Num_IMP(Segment *self, const CharBuf *field) {
+Seg_Field_Num_IMP(Segment *self, const String *field) {
     if (field == NULL) {
         return 0;
     }

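The segment-naming helpers keep their contracts through the rename: Seg_num_to_name() still returns a freshly allocated base-36 name and Seg_valid_seg_name() still checks the seg_ prefix; only the String types change. A short sketch using nothing beyond those two functions and DECREF, in the file's own S_-helper style:

    static bool
    S_name_round_trips(int64_t seg_num) {
        String *name = Seg_num_to_name(seg_num);  // e.g. 12 => "seg_c" (base 36); incremented
        bool    ok   = Seg_valid_seg_name(name);  // true: matches /^seg_\w+$/
        DECREF(name);                             // caller owns num_to_name's result
        return ok;
    }
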
http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Segment.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Segment.cfh b/core/Lucy/Index/Segment.cfh
index faccd2d..3231938 100644
--- a/core/Lucy/Index/Segment.cfh
+++ b/core/Lucy/Index/Segment.cfh
@@ -33,7 +33,7 @@ parcel Lucy;
 
 public class Lucy::Index::Segment cnick Seg inherits Clownfish::Obj {
 
-    CharBuf     *name;
+    String      *name;
     int64_t      count;
     int64_t      number;
     Hash        *by_name;   /* field numbers by name */
@@ -48,14 +48,14 @@ public class Lucy::Index::Segment cnick Seg inherits Clownfish::Obj {
 
     /** Return a segment name with a base-36-encoded segment number.
      */
-    inert incremented CharBuf*
+    inert incremented String*
     num_to_name(int64_t number);
 
-    /** Return true if the CharBuf is a segment name, i.e. matches this
+    /** Return true if the String is a segment name, i.e. matches this
      * pattern:  /^seg_\w+$/
      */
     inert bool
-    valid_seg_name(const CharBuf *name);
+    valid_seg_name(const String *name);
 
     /** Register a new field and assign it a field number.  If the field was
      * already known, nothing happens.
@@ -64,7 +64,7 @@ public class Lucy::Index::Segment cnick Seg inherits Clownfish::Obj {
      * @return the field's field number, which is a positive integer.
      */
     public int32_t
-    Add_Field(Segment *self, const CharBuf *field);
+    Add_Field(Segment *self, const String *field);
 
     /** Store arbitrary information in the segment's metadata Hash, to be
      * serialized later.  Throws an error if <code>key</code> is used twice.
@@ -73,7 +73,7 @@ public class Lucy::Index::Segment cnick Seg inherits Clownfish::Obj {
      * @param metadata JSON-izable data structure.
      */
     public void
-    Store_Metadata(Segment *self, const CharBuf *key,
+    Store_Metadata(Segment *self, const String *key,
                    decremented Obj *metadata);
 
     void
@@ -83,7 +83,7 @@ public class Lucy::Index::Segment cnick Seg inherits Clownfish::Obj {
     /** Fetch a value from the Segment's metadata hash.
      */
     public nullable Obj*
-    Fetch_Metadata(Segment *self, const CharBuf *key);
+    Fetch_Metadata(Segment *self, const String *key);
 
     nullable Obj*
     Fetch_Metadata_Str(Segment *self, const char *key, size_t len);
@@ -95,17 +95,17 @@ public class Lucy::Index::Segment cnick Seg inherits Clownfish::Obj {
      * @param field Field name.
      */
     public int32_t
-    Field_Num(Segment *self, const CharBuf *field);
+    Field_Num(Segment *self, const String *field);
 
     /** Given a field number, return the name of its field, or NULL if the
      * field name can't be found.
      */
-    public nullable CharBuf*
+    public nullable String*
     Field_Name(Segment *self, int32_t field_num);
 
     /** Getter for the object's seg name.
      */
-    public CharBuf*
+    public String*
     Get_Name(Segment *self);
 
     /** Getter for the segment number.

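Taken together, the Segment declarations above spell out the calling convention that survives the switch to String: Add_Field() clones the name it is given, Field_Name() hands back a borrowed (possibly NULL) String, and Store_Metadata() consumes its value but not its key. A minimal sketch under those declarations, with an illustrative field name and a hypothetical metadata key, assuming the usual Lucy headers:

    static void
    S_register_field(Segment *segment) {
        String *field = Str_newf("content");               // illustrative field name
        int32_t num   = Seg_Add_Field(segment, field);     // positive number; the name is cloned
        String *name  = Seg_Field_Name(segment, num);      // borrowed; NULL for an unknown number
        int32_t check = Seg_Field_Num(segment, field);     // same number as num
        String *key   = Str_newf("my_component");          // hypothetical metadata key
        Seg_Store_Metadata(segment, key,
                           (Obj*)Str_newf("%i32", check)); // value is consumed; key is only borrowed
        UNUSED_VAR(name);
        DECREF(key);
        DECREF(field);
    }
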
http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Similarity.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Similarity.c b/core/Lucy/Index/Similarity.c
index 6fff6ca..9323e73 100644
--- a/core/Lucy/Index/Similarity.c
+++ b/core/Lucy/Index/Similarity.c
@@ -84,15 +84,15 @@ Obj*
 Sim_Dump_IMP(Similarity *self) {
     Hash *dump = Hash_new(0);
     Hash_Store_Str(dump, "_class", 6,
-                   (Obj*)CB_Clone(Sim_Get_Class_Name(self)));
+                   (Obj*)Str_Clone(Sim_Get_Class_Name(self)));
     return (Obj*)dump;
 }
 
 Similarity*
 Sim_Load_IMP(Similarity *self, Obj *dump) {
     Hash *source = (Hash*)CERTIFY(dump, HASH);
-    CharBuf *class_name 
-        = (CharBuf*)CERTIFY(Hash_Fetch_Str(source, "_class", 6), CHARBUF);
+    String *class_name 
+        = (String*)CERTIFY(Hash_Fetch_Str(source, "_class", 6), STRING);
     VTable *vtable = VTable_singleton(class_name, NULL);
     Similarity *loaded = (Similarity*)VTable_Make_Obj(vtable);
     UNUSED_VAR(self);
@@ -107,8 +107,8 @@ Sim_Serialize_IMP(Similarity *self, OutStream *target) {
 
 Similarity*
 Sim_Deserialize_IMP(Similarity *self, InStream *instream) {
-    CharBuf *class_name = Freezer_read_charbuf(instream);
-    if (!CB_Equals(class_name, (Obj*)Sim_Get_Class_Name(self))) {
+    String *class_name = Freezer_read_charbuf(instream);
+    if (!Str_Equals(class_name, (Obj*)Sim_Get_Class_Name(self))) {
         THROW(ERR, "Class name mismatch: '%o' '%o'", Sim_Get_Class_Name(self),
               class_name);
     }

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SkipStepper.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SkipStepper.c b/core/Lucy/Index/SkipStepper.c
index e92e23c..a78ff4f 100644
--- a/core/Lucy/Index/SkipStepper.c
+++ b/core/Lucy/Index/SkipStepper.c
@@ -51,10 +51,10 @@ SkipStepper_Read_Record_IMP(SkipStepper *self, InStream *instream) {
     ivars->filepos  += InStream_Read_C64(instream);
 }
 
-CharBuf*
+String*
 SkipStepper_To_String_IMP(SkipStepper *self) {
     SkipStepperIVARS *const ivars = SkipStepper_IVARS(self);
-    return CB_newf("skip doc: %u32 file pointer: %i64", ivars->doc_id,
+    return Str_newf("skip doc: %u32 file pointer: %i64", ivars->doc_id,
                    ivars->filepos);
 }
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SkipStepper.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SkipStepper.cfh b/core/Lucy/Index/SkipStepper.cfh
index 6dceba9..1bfc8ac 100644
--- a/core/Lucy/Index/SkipStepper.cfh
+++ b/core/Lucy/Index/SkipStepper.cfh
@@ -37,7 +37,7 @@ class Lucy::Index::SkipStepper inherits Lucy::Util::Stepper {
     void
     Set_ID_And_Filepos(SkipStepper *self, int32_t doc_id, int64_t filepos);
 
-    public incremented CharBuf*
+    public incremented String*
     To_String(SkipStepper *self);
 }
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Snapshot.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Snapshot.c b/core/Lucy/Index/Snapshot.c
index 3a2f2df..5ea097d 100644
--- a/core/Lucy/Index/Snapshot.c
+++ b/core/Lucy/Index/Snapshot.c
@@ -60,13 +60,13 @@ Snapshot_Destroy_IMP(Snapshot *self) {
 }
 
 void
-Snapshot_Add_Entry_IMP(Snapshot *self, const CharBuf *entry) {
+Snapshot_Add_Entry_IMP(Snapshot *self, const String *entry) {
     SnapshotIVARS *const ivars = Snapshot_IVARS(self);
     Hash_Store(ivars->entries, (Obj*)entry, (Obj*)CFISH_TRUE);
 }
 
 bool
-Snapshot_Delete_Entry_IMP(Snapshot *self, const CharBuf *entry) {
+Snapshot_Delete_Entry_IMP(Snapshot *self, const String *entry) {
     SnapshotIVARS *const ivars = Snapshot_IVARS(self);
     Obj *val = Hash_Delete(ivars->entries, (Obj*)entry);
     if (val) {
@@ -91,24 +91,24 @@ Snapshot_Num_Entries_IMP(Snapshot *self) {
 }
 
 void
-Snapshot_Set_Path_IMP(Snapshot *self, const CharBuf *path) {
+Snapshot_Set_Path_IMP(Snapshot *self, const String *path) {
     SnapshotIVARS *const ivars = Snapshot_IVARS(self);
     DECREF(ivars->path);
-    ivars->path = path ? CB_Clone(path) : NULL;
+    ivars->path = path ? Str_Clone(path) : NULL;
 }
 
-CharBuf*
+String*
 Snapshot_Get_Path_IMP(Snapshot *self) {
     return Snapshot_IVARS(self)->path;
 }
 
 Snapshot*
-Snapshot_Read_File_IMP(Snapshot *self, Folder *folder, const CharBuf *path) {
+Snapshot_Read_File_IMP(Snapshot *self, Folder *folder, const String *path) {
     SnapshotIVARS *const ivars = Snapshot_IVARS(self);
 
     // Eliminate all prior data. Pick a snapshot file.
     S_zero_out(self);
-    ivars->path = path ? CB_Clone(path) : IxFileNames_latest_snapshot(folder);
+    ivars->path = path ? Str_Clone(path) : IxFileNames_latest_snapshot(folder);
 
     if (ivars->path) {
         Hash *snap_data
@@ -139,8 +139,8 @@ Snapshot_Read_File_IMP(Snapshot *self, Folder *folder, const CharBuf *path) {
         }
         Hash_Clear(ivars->entries);
         for (uint32_t i = 0, max = VA_Get_Size(list); i < max; i++) {
-            CharBuf *entry
-                = (CharBuf*)CERTIFY(VA_Fetch(list, i), CHARBUF);
+            String *entry
+                = (String*)CERTIFY(VA_Fetch(list, i), STRING);
             Hash_Store(ivars->entries, (Obj*)entry, (Obj*)CFISH_TRUE);
         }
 
@@ -158,9 +158,9 @@ S_clean_segment_contents(VArray *orig) {
     // they cause a problem with FilePurger.
     VArray *cleaned = VA_new(VA_Get_Size(orig));
     for (uint32_t i = 0, max = VA_Get_Size(orig); i < max; i++) {
-        CharBuf *name = (CharBuf*)VA_Fetch(orig, i);
+        String *name = (String*)VA_Fetch(orig, i);
         if (!Seg_valid_seg_name(name)) {
-            if (CB_Starts_With_Str(name, "seg_", 4)) {
+            if (Str_Starts_With_Str(name, "seg_", 4)) {
                 continue;  // Skip this file.
             }
         }
@@ -171,7 +171,7 @@ S_clean_segment_contents(VArray *orig) {
 
 
 void
-Snapshot_Write_File_IMP(Snapshot *self, Folder *folder, const CharBuf *path) {
+Snapshot_Write_File_IMP(Snapshot *self, Folder *folder, const String *path) {
     SnapshotIVARS *const ivars = Snapshot_IVARS(self);
     Hash   *all_data = Hash_new(0);
     VArray *list     = Snapshot_List(self);
@@ -179,14 +179,14 @@ Snapshot_Write_File_IMP(Snapshot *self, Folder *folder, const CharBuf *path) {
     // Update path.
     DECREF(ivars->path);
     if (path) {
-        ivars->path = CB_Clone(path);
+        ivars->path = Str_Clone(path);
     }
     else {
-        CharBuf *latest = IxFileNames_latest_snapshot(folder);
+        String *latest = IxFileNames_latest_snapshot(folder);
         uint64_t gen = latest ? IxFileNames_extract_gen(latest) + 1 : 1;
         char base36[StrHelp_MAX_BASE36_BYTES];
         StrHelp_to_base36(gen, &base36);
-        ivars->path = CB_newf("snapshot_%s.json", &base36);
+        ivars->path = Str_newf("snapshot_%s.json", &base36);
         DECREF(latest);
     }
 
@@ -201,9 +201,9 @@ Snapshot_Write_File_IMP(Snapshot *self, Folder *folder, const CharBuf *path) {
 
     // Create a JSON-izable data structure.
     Hash_Store_Str(all_data, "format", 6,
-                   (Obj*)CB_newf("%i32", (int32_t)Snapshot_current_file_format));
+                   (Obj*)Str_newf("%i32", (int32_t)Snapshot_current_file_format));
     Hash_Store_Str(all_data, "subformat", 9,
-                   (Obj*)CB_newf("%i32", (int32_t)Snapshot_current_file_subformat));
+                   (Obj*)Str_newf("%i32", (int32_t)Snapshot_current_file_subformat));
 
     // Write out JSON-ized data to the new file.
     Json_spew_json((Obj*)all_data, folder, ivars->path);

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/Snapshot.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/Snapshot.cfh b/core/Lucy/Index/Snapshot.cfh
index d82714f..01bff99 100644
--- a/core/Lucy/Index/Snapshot.cfh
+++ b/core/Lucy/Index/Snapshot.cfh
@@ -29,7 +29,7 @@ parcel Lucy;
 public class Lucy::Index::Snapshot inherits Clownfish::Obj {
 
     Hash        *entries;
-    CharBuf     *path;
+    String      *path;
 
     inert int32_t current_file_format;
 
@@ -55,7 +55,7 @@ public class Lucy::Index::Snapshot inherits Clownfish::Obj {
     /** Add a filepath to the snapshot.
      */
     public void
-    Add_Entry(Snapshot *self, const CharBuf *entry);
+    Add_Entry(Snapshot *self, const String *entry);
 
     /** Delete a filepath from the snapshot.
      *
@@ -63,7 +63,7 @@ public class Lucy::Index::Snapshot inherits Clownfish::Obj {
      * otherwise.
      */
     public bool
-    Delete_Entry(Snapshot *self, const CharBuf *entry);
+    Delete_Entry(Snapshot *self, const String *entry);
 
     /** Decode a snapshot file and initialize the object to reflect its
      * contents.
@@ -74,7 +74,7 @@ public class Lucy::Index::Snapshot inherits Clownfish::Obj {
      * @return the object, allowing an assignment idiom.
      */
     public Snapshot*
-    Read_File(Snapshot *self, Folder *folder, const CharBuf *path = NULL);
+    Read_File(Snapshot *self, Folder *folder, const String *path = NULL);
 
     /** Write a snapshot file.  The caller must lock the index while this
      * operation takes place, and the operation will fail if the snapshot file
@@ -86,18 +86,18 @@ public class Lucy::Index::Snapshot inherits Clownfish::Obj {
      * folder.
      */
     public void
-    Write_File(Snapshot *self, Folder *folder, const CharBuf *path = NULL);
+    Write_File(Snapshot *self, Folder *folder, const String *path = NULL);
 
     /** Set the path to the file that the Snapshot object serves as a proxy
      * for.
      */
     public void
-    Set_Path(Snapshot *self, const CharBuf *path);
+    Set_Path(Snapshot *self, const String *path);
 
     /** Get the path to the snapshot file.  Initially NULL; updated by
      * Read_File(), Write_File(), and Set_Path().
      */
-    public nullable CharBuf*
+    public nullable String*
     Get_Path(Snapshot *self);
 
     public void

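The Snapshot API keeps its shape with String arguments: Add_Entry() borrows the path it records, Write_File() with a NULL path synthesizes the next snapshot_<base36>.json name (the gen + 1 logic in Snapshot.c above), and Get_Path() then returns that generated name as a borrowed String. A hedged sketch of that sequence; the entry path and helper name are illustrative:

    static String*
    S_record_and_commit(Snapshot *snapshot, Folder *folder) {
        String *entry = Str_newf("seg_1/segmeta.json");  // illustrative file path
        Snapshot_Add_Entry(snapshot, entry);             // entry is only borrowed
        Snapshot_Write_File(snapshot, folder, NULL);     // NULL => next snapshot_<gen>.json is generated
        DECREF(entry);
        return Snapshot_Get_Path(snapshot);              // borrowed; set by Write_File()
    }

Per the Write_File() docs above, the caller is responsible for locking the index while this runs, and the write fails if the snapshot file already exists.
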
http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SortCache.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SortCache.c b/core/Lucy/Index/SortCache.c
index 6b17c05..53f6d43 100644
--- a/core/Lucy/Index/SortCache.c
+++ b/core/Lucy/Index/SortCache.c
@@ -21,7 +21,7 @@
 #include "Lucy/Plan/FieldType.h"
 
 SortCache*
-SortCache_init(SortCache *self, const CharBuf *field, FieldType *type,
+SortCache_init(SortCache *self, const String *field, FieldType *type,
                void *ords, int32_t cardinality, int32_t doc_max, int32_t null_ord,
                int32_t ord_width) {
     SortCacheIVARS *const ivars = SortCache_IVARS(self);
@@ -33,7 +33,7 @@ SortCache_init(SortCache *self, const CharBuf *field, FieldType *type,
     if (!FType_Sortable(type)) {
         THROW(ERR, "Non-sortable FieldType for %o", field);
     }
-    ivars->field       = CB_Clone(field);
+    ivars->field       = Str_Clone(field);
     ivars->type        = (FieldType*)INCREF(type);
     ivars->ords        = ords;
     ivars->cardinality = cardinality;

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SortCache.cfh
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SortCache.cfh b/core/Lucy/Index/SortCache.cfh
index 1ee617a..9554425 100644
--- a/core/Lucy/Index/SortCache.cfh
+++ b/core/Lucy/Index/SortCache.cfh
@@ -20,7 +20,7 @@ parcel Lucy;
  */
 class Lucy::Index::SortCache inherits Clownfish::Obj {
 
-    CharBuf   *field;
+    String    *field;
     FieldType *type;
     void      *ords;
     int32_t    doc_max;
@@ -30,7 +30,7 @@ class Lucy::Index::SortCache inherits Clownfish::Obj {
     bool       native_ords;
 
     public inert SortCache*
-    init(SortCache *self, const CharBuf *field, FieldType *type,
+    init(SortCache *self, const String *field, FieldType *type,
          void *ords, int32_t cardinality, int32_t doc_max, int32_t null_ord = -1,
          int32_t ord_width);
 

http://git-wip-us.apache.org/repos/asf/lucy/blob/2c3dbf15/core/Lucy/Index/SortCache/NumericSortCache.c
----------------------------------------------------------------------
diff --git a/core/Lucy/Index/SortCache/NumericSortCache.c b/core/Lucy/Index/SortCache/NumericSortCache.c
index 80b7d95..143c9c1 100644
--- a/core/Lucy/Index/SortCache/NumericSortCache.c
+++ b/core/Lucy/Index/SortCache/NumericSortCache.c
@@ -30,7 +30,7 @@
 #include "Lucy/Store/Folder.h"
 
 NumericSortCache*
-NumSortCache_init(NumericSortCache *self, const CharBuf *field,
+NumSortCache_init(NumericSortCache *self, const String *field,
                   FieldType *type, int32_t cardinality, int32_t doc_max,
                   int32_t null_ord, int32_t ord_width, InStream *ord_in,
                   InStream *dat_in) {
@@ -82,7 +82,7 @@ NumSortCache_Destroy_IMP(NumericSortCache *self) {
 /***************************************************************************/
 
 Float64SortCache*
-F64SortCache_new(const CharBuf *field, FieldType *type, int32_t cardinality,
+F64SortCache_new(const String *field, FieldType *type, int32_t cardinality,
                  int32_t doc_max, int32_t null_ord, int32_t ord_width,
                  InStream *ord_in, InStream *dat_in) {
     Float64SortCache *self
@@ -92,7 +92,7 @@ F64SortCache_new(const CharBuf *field, FieldType *type, int32_t cardinality,
 }
 
 Float64SortCache*
-F64SortCache_init(Float64SortCache *self, const CharBuf *field,
+F64SortCache_init(Float64SortCache *self, const String *field,
                   FieldType *type, int32_t cardinality, int32_t doc_max,
                   int32_t null_ord, int32_t ord_width, InStream *ord_in,
                   InStream *dat_in) {
@@ -127,7 +127,7 @@ F64SortCache_Make_Blank_IMP(Float64SortCache *self) {
 /***************************************************************************/
 
 Float32SortCache*
-F32SortCache_new(const CharBuf *field, FieldType *type, int32_t cardinality,
+F32SortCache_new(const String *field, FieldType *type, int32_t cardinality,
                  int32_t doc_max, int32_t null_ord, int32_t ord_width,
                  InStream *ord_in, InStream *dat_in) {
     Float32SortCache *self
@@ -137,7 +137,7 @@ F32SortCache_new(const CharBuf *field, FieldType *type, int32_t cardinality,
 }
 
 Float32SortCache*
-F32SortCache_init(Float32SortCache *self, const CharBuf *field,
+F32SortCache_init(Float32SortCache *self, const String *field,
                   FieldType *type, int32_t cardinality, int32_t doc_max,
                   int32_t null_ord, int32_t ord_width, InStream *ord_in,
                   InStream *dat_in) {
@@ -172,7 +172,7 @@ F32SortCache_Make_Blank_IMP(Float32SortCache *self) {
 /***************************************************************************/
 
 Int32SortCache*
-I32SortCache_new(const CharBuf *field, FieldType *type, int32_t cardinality,
+I32SortCache_new(const String *field, FieldType *type, int32_t cardinality,
                  int32_t doc_max, int32_t null_ord, int32_t ord_width,
                  InStream *ord_in, InStream *dat_in) {
     Int32SortCache *self
@@ -182,7 +182,7 @@ I32SortCache_new(const CharBuf *field, FieldType *type, int32_t cardinality,
 }
 
 Int32SortCache*
-I32SortCache_init(Int32SortCache *self, const CharBuf *field,
+I32SortCache_init(Int32SortCache *self, const String *field,
                   FieldType *type, int32_t cardinality, int32_t doc_max,
                   int32_t null_ord, int32_t ord_width, InStream *ord_in,
                   InStream *dat_in) {
@@ -217,7 +217,7 @@ I32SortCache_Make_Blank_IMP(Int32SortCache *self) {
 /***************************************************************************/
 
 Int64SortCache*
-I64SortCache_new(const CharBuf *field, FieldType *type, int32_t cardinality,
+I64SortCache_new(const String *field, FieldType *type, int32_t cardinality,
                  int32_t doc_max, int32_t null_ord, int32_t ord_width,
                  InStream *ord_in, InStream *dat_in) {
     Int64SortCache *self
@@ -227,7 +227,7 @@ I64SortCache_new(const CharBuf *field, FieldType *type, int32_t cardinality,
 }
 
 Int64SortCache*
-I64SortCache_init(Int64SortCache *self, const CharBuf *field,
+I64SortCache_init(Int64SortCache *self, const String *field,
                   FieldType *type, int32_t cardinality, int32_t doc_max,
                   int32_t null_ord, int32_t ord_width, InStream *ord_in,
                   InStream *dat_in) {