author     B. Watson <urchlay@slackware.uk>          2025-12-01 16:33:56 -0500
committer  B. Watson <urchlay@slackware.uk>          2025-12-01 16:33:56 -0500
commit     d58c6b576445d7c46765bab73d7564956263f7f0  (patch)
tree       d0f028f277615d53ec151581bff3a8d5c94091fd  /src
parent     5e971d7fc7df0c4d3bc032c7e85e214617eaa2d4  (diff)
download   alftools-d58c6b576445d7c46765bab73d7564956263f7f0.tar.gz
Implement -vvv for alf.
Diffstat (limited to 'src')
-rw-r--r--  src/crunch.c  61
1 file changed, 61 insertions, 0 deletions
diff --git a/src/crunch.c b/src/crunch.c
index 880f059..2369db5 100644
--- a/src/crunch.c
+++ b/src/crunch.c
@@ -17,6 +17,8 @@
#define TOK_END 257
#define INIT_TOKEN 258
+extern int opt_verbose; /* alf.c, -v option */
+
u8 input_buf[MAX_INPUT_SIZE];
u8 output_buf[MAX_INPUT_SIZE];
unsigned int input_len, output_len, out_bitpos;
@@ -55,6 +57,46 @@ void init_table(void) {
curr_token = INIT_TOKEN;
}
+char *fmt_chr(u8 c) {
+ static char buf[10];
+ if(c > 33 && c < 127)
+ sprintf(buf, "'%c'", c);
+ else
+ sprintf(buf, "$%02x", c);
+ return buf;
+}
+
+void dump_kids(token_t *t, int level) {
+ token_t *kid;
+ int i;
+
+ if(t->kids) {
+ kid = t->kids;
+ while(kid) {
+ for(i = 0; i < level; i++)
+ fputs(" ", stdout);
+ printf("#%d/%s\n", kid->number, fmt_chr(kid->chr));
+ dump_kids(kid, level + 1);
+ kid = kid->sibling;
+ }
+ } else {
+ for(i = 0; i < level; i++)
+ fputs(" ", stdout);
+ fputs("(no kids)\n", stdout);
+ }
+}
+
+void dump_tokens(void) {
+ int i;
+
+ for(i = 0; i < 256; i++) {
+ if(root_tokens[i].kids) {
+ printf("root_tokens[%s], #%d\n", fmt_chr(root_tokens[i].chr), root_tokens[i].number);
+ dump_kids(&root_tokens[i], 1);
+ }
+ }
+}
+
void inc_output_len(void) {
if(++output_len == MAX_INPUT_SIZE) {
fprintf(stderr, "%s: fatal: compressed file would be >16MB.\n", self);
@@ -74,6 +116,10 @@ void append_bit(int bit) {
void store_token(int tok) {
int mask;
+ if(opt_verbose > 2) {
+ printf("<%d >%d:%d #%d\n", in_pos, output_len, out_bitpos, tok);
+ }
+
for(mask = 1 << (token_bits - 1); mask; mask >>= 1) {
append_bit(tok & mask ? 1 : 0);
}
@@ -149,13 +195,24 @@ void make_token(token_t *oldtok, u8 newchr) {
/* if the token table is full, reset it. basically start over like
we would with a new file. */
if(curr_token == max_token) {
+ if(opt_verbose > 2) {
+ printf("\ntoken %d won't fit in %d bits, ", max_token, token_bits);
+ }
if(token_bits == MAX_BITS) {
+ if(opt_verbose > 2) {
+ printf("resetting token_bits to %d\n", INITIAL_BITS);
+ printf("token table is full, clearing, old contents:\n");
+ dump_tokens();
+ }
store_token(TOK_RESET); /* stored at the *old* token size! */
token_bits = INITIAL_BITS;
init_table();
return; /* since we're starting over, *don't* make a token */
} else {
token_bits++;
+ if(opt_verbose > 2) {
+ printf("token_bits increased to %d\n", token_bits);
+ }
}
max_token = 1 << token_bits;
}
@@ -191,6 +248,10 @@ void crunch(void) {
store_token(TOK_END);
if(out_bitpos) inc_output_len();
+ if(opt_verbose > 2) {
+ printf("\nfinal token table contents:\n");
+ dump_tokens();
+ }
init_table();
}
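
A note on reading the new output: the -vvv trace line in store_token() prints, in order, the current input offset (in_pos), the output byte and bit position (output_len:out_bitpos), and the token number being written. The dump_kids()/dump_tokens() pair walks the token table as a first-child/next-sibling trie rooted at the 256 single-byte tokens. The standalone sketch below mirrors that traversal with a hand-built three-node trie; the token_t layout (number, chr, kids, sibling) is only assumed from the fields the diff touches and is not copied from crunch.c.

/* Minimal sketch of the first-child/next-sibling dump used by
   dump_kids()/dump_tokens() above. The token_t layout here is an
   assumption based on the fields used in the diff. */
#include <stdio.h>

typedef unsigned char u8;

typedef struct token {
	int number;            /* token id as emitted into the output stream */
	u8 chr;                 /* byte this token appends to its parent */
	struct token *kids;     /* first child (this token extended by one byte) */
	struct token *sibling;  /* next child of the same parent */
} token_t;

/* print printable bytes as 'c', everything else as $hex
   (same test as fmt_chr() in the diff) */
static void print_chr(u8 c) {
	if(c > 33 && c < 127)
		printf("'%c'", c);
	else
		printf("$%02x", c);
}

/* depth-first walk: print the node, recurse into its kids,
   then move on to its sibling */
static void dump(token_t *t, int level) {
	while(t) {
		for(int i = 0; i < level; i++)
			fputs("  ", stdout);
		printf("#%d/", t->number);
		print_chr(t->chr);
		putchar('\n');
		if(t->kids)
			dump(t->kids, level + 1);
		t = t->sibling;
	}
}

int main(void) {
	/* hand-built trie: root 'a' has kids 'b' and 'c'; 'b' has one kid 'd'.
	   Token numbers here are illustrative only. */
	token_t d = { 260, 'd', NULL, NULL };
	token_t c = { 259, 'c', NULL, NULL };
	token_t b = { 258, 'b', &d,   &c   };
	token_t a = {  97, 'a', &b,   NULL };

	dump(&a, 0);
	return 0;
}

A first-child/next-sibling layout like this keeps each node at two pointers instead of a 256-entry child array, at the cost of a linear scan along the sibling chain when looking up a child.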