c - Map variable-length string to int


I can't figure out how to write a function that takes the following input and produces the following output:

in (int) | out (char *)
0        | ""
1        | "a"
2        | "b"
3        | "c"
4        | "aa"
5        | "ab"
6        | "ac"
7        | "ba"
8        | "bb"
...

It is not simply converting the input to ternary, because there is a difference between "a" and "aa" (whereas there is no difference between 0 and 00).

I have found a correlation between the length of the string and the input value (len = floor(log2(in + 1))) when I use only a and b:

in (int) | floor(log2(in + 1)) | out (char *)
0        | 0                   | ""
1        | 1                   | "a"
2        | 1                   | "b"
3        | 2                   | "aa"
4        | 2                   | "ab"
5        | 2                   | "ba"
6        | 2                   | "bb"
7        | 3                   | "aaa"
8        | 3                   | "aab"

What is the general correlation between the length of the output and the input value, given that there are n different valid characters?
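I suspect the pattern generalizes to len = floor(log_n((n - 1) * in + 1)): there are n^k strings of length exactly k, so the values 0 through (n^(L+1) - 1) / (n - 1) - 1 fit in at most L characters, and for n = 2 this reduces to floor(log2(in + 1)). Here is a minimal sketch to check that by counting (str_length is a name I made up for this check, not part of any existing code):

#include <math.h>
#include <stdio.h>

/* Length of the encoded string for value 'in' with an alphabet of n
   characters (n >= 2), assuming the length-then-alphabetical ordering
   shown in the tables above. */
static unsigned str_length(unsigned in, unsigned n)
{
    unsigned len = 0;
    unsigned count = 1;   /* number of strings of the current length */
    unsigned total = 1;   /* number of strings of length <= len      */
    while (in >= total)   /* value not covered yet: need a longer string */
    {
        count *= n;
        total += count;
        len++;
    }
    return len;
}

int main(void)
{
    /* With n = 2 this reproduces floor(log2(in + 1)) from the table above. */
    for (unsigned in = 0; in < 10; in++)
        printf("n=2: in=%u len=%u (floor(log2(in+1)) = %u)\n",
               in, str_length(in, 2), (unsigned)floor(log2(in + 1)));
    /* With n = 3 the lengths match the "", "a".."c", "aa".."cc", "aaa" table. */
    for (unsigned in = 0; in < 14; in++)
        printf("n=3: in=%u len=%u\n", in, str_length(in, 3));
    return 0;
}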

This is related to, but distinctly different from, Calc cell convertor in C. The code below was swiftly derived from that code:

#include <ctype.h>
#include <stdio.h>
#include <string.h>

/* these declarations should be in a header */
extern char     *b3_row_encode(unsigned row, char *buffer);
extern unsigned  b3_row_decode(const char *buffer);

/* Write the digits for row, most-significant first; the div-1 recursion
   makes the numbering bijective (no leading-zero ambiguity). */
static char *b3_encode(unsigned row, char *buffer)
{
    unsigned div = row / 3;
    unsigned rem = row % 3;
    if (div > 0)
        buffer = b3_encode(div - 1, buffer);
    *buffer++ = rem + 'a';
    *buffer = '\0';
    return buffer;
}

/* 0 maps to the empty string; otherwise encode row - 1 */
char *b3_row_encode(unsigned row, char *buffer)
{
    if (row == 0)
    {
        *buffer = '\0';
        return buffer;
    }
    return(b3_encode(row - 1, buffer));
}

/* Inverse mapping: each letter contributes c - 'a' + 1 in base 3 */
unsigned b3_row_decode(const char *code)
{
    unsigned char c;
    unsigned r = 0;
    while ((c = *code++) != '\0')
    {
        if (!isalpha(c))
            break;
        c = tolower(c);
        r = r * 3 + c - 'a' + 1;
    }
    return r;
}

#ifdef TEST

static const struct
{
    unsigned col;
    char     cell[10];
} tests[] =
{
    {    0,      "" },
    {    1,     "a" },
    {    2,     "b" },
    {    3,     "c" },
    {    4,    "aa" },
    {    5,    "ab" },
    {    6,    "ac" },
    {    7,    "ba" },
    {    8,    "bb" },
    {    9,    "bc" },
    {   10,    "ca" },
    {   11,    "cb" },
    {   12,    "cc" },
    {   13,   "aaa" },
    {   14,   "aab" },
    {   16,   "aba" },
    {   22,   "baa" },
    {  169, "abcba" },
};
enum { num_tests = sizeof(tests) / sizeof(tests[0]) };

int main(void)
{
    int pass = 0;

    for (int i = 0; i < num_tests; i++)
    {
        char buffer[32];
        b3_row_encode(tests[i].col, buffer);
        unsigned n = b3_row_decode(buffer);
        const char *pf = "fail";

        if (strcmp(tests[i].cell, buffer) == 0 && n == tests[i].col)
        {
            pf = "pass";
            pass++;
        }
        printf("%s: col %3u, cell (wanted: %-8s vs actual: %-8s) col = %3u\n",
               pf, tests[i].col, tests[i].cell, buffer, n);
    }

    if (pass == num_tests)
        printf("== pass == %d tests ok\n", pass);
    else
        printf("!! fail !! %d out of %d failed\n", (num_tests - pass), num_tests);

    return (pass == num_tests) ? 0 : 1;
}

#endif /* TEST */

The code includes a test program, a function to convert a string to an integer, and a function to convert an integer to a string. The tests run back-to-back conversions. Note that the code handles the empty string as zero.

Sample output:

pass: col   0, cell (wanted:          vs actual:          ) col =   0
pass: col   1, cell (wanted: a        vs actual: a        ) col =   1
pass: col   2, cell (wanted: b        vs actual: b        ) col =   2
pass: col   3, cell (wanted: c        vs actual: c        ) col =   3
pass: col   4, cell (wanted: aa       vs actual: aa       ) col =   4
pass: col   5, cell (wanted: ab       vs actual: ab       ) col =   5
pass: col   6, cell (wanted: ac       vs actual: ac       ) col =   6
pass: col   7, cell (wanted: ba       vs actual: ba       ) col =   7
pass: col   8, cell (wanted: bb       vs actual: bb       ) col =   8
pass: col   9, cell (wanted: bc       vs actual: bc       ) col =   9
pass: col  10, cell (wanted: ca       vs actual: ca       ) col =  10
pass: col  11, cell (wanted: cb       vs actual: cb       ) col =  11
pass: col  12, cell (wanted: cc       vs actual: cc       ) col =  12
pass: col  13, cell (wanted: aaa      vs actual: aaa      ) col =  13
pass: col  14, cell (wanted: aab      vs actual: aab      ) col =  14
pass: col  16, cell (wanted: aba      vs actual: aba      ) col =  16
pass: col  22, cell (wanted: baa      vs actual: baa      ) col =  22
pass: col 169, cell (wanted: abcba    vs actual: abcba    ) col = 169
== pass == 18 tests ok
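As an aside, the same scheme should generalize from 3 to any number n of valid characters by making the base a parameter. The following is a rough sketch of my own along those lines (the bn_* names are hypothetical, not part of the program above), checked only by the small loop in main:

#include <ctype.h>
#include <stdio.h>

/* Same recursion as b3_encode, with the base passed in as n. */
static char *bn_encode_digits(unsigned row, unsigned n, char *buffer)
{
    unsigned div = row / n;
    unsigned rem = row % n;
    if (div > 0)
        buffer = bn_encode_digits(div - 1, n, buffer);
    *buffer++ = rem + 'a';
    *buffer = '\0';
    return buffer;
}

/* 0 maps to the empty string; otherwise encode row - 1 in base n */
char *bn_encode(unsigned row, unsigned n, char *buffer)
{
    if (row == 0)
    {
        *buffer = '\0';
        return buffer;
    }
    return bn_encode_digits(row - 1, n, buffer);
}

/* Inverse mapping: each letter contributes c - 'a' + 1 in base n */
unsigned bn_decode(const char *code, unsigned n)
{
    unsigned char c;
    unsigned r = 0;
    while ((c = *code++) != '\0')
    {
        if (!isalpha(c))
            break;
        c = tolower(c);
        r = r * n + c - 'a' + 1;
    }
    return r;
}

int main(void)
{
    /* n = 3 reproduces the b3_* behaviour shown above */
    char buffer[32];
    for (unsigned i = 0; i < 15; i++)
    {
        bn_encode(i, 3, buffer);
        printf("%2u -> \"%s\" -> %u\n", i, buffer, bn_decode(buffer, 3));
    }
    return 0;
}

With n = 26, the values 1, 2, ... give the familiar spreadsheet column sequence a, b, ..., z, aa, ab, ..., while 0 maps to the empty string.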
