[master] 12632f2 Qualify for the "evilphk" moniker:
Poul-Henning Kamp
phk at project.varnish-software.com
Sat Jan 22 17:53:24 CET 2011
commit 12632f242303debe3ba8adafea54ab8e35ad910f
Author: Poul-Henning Kamp <phk at FreeBSD.org>
Date: Sat Jan 22 16:51:17 2011 +0000
Qualify for the "evilphk" moniker:
For a limited number of cases we can now esi:include a gzip'ed
object into a gzip'ed ESI object.
There is less than 1 chance in hell this will work for you in
practice, I need to handle the N-1 other cases of last bit
and stop bit combinations in the gzip'ed object, but that is
just a small (but tedious) matter of programming.
diff --git a/bin/varnishd/cache_esi_deliver.c b/bin/varnishd/cache_esi_deliver.c
index a3c9dc5..2be22e3 100644
--- a/bin/varnishd/cache_esi_deliver.c
+++ b/bin/varnishd/cache_esi_deliver.c
@@ -129,14 +129,22 @@ static void
ved_sendchunk(const struct sess *sp, const void *cb, ssize_t cl,
const void *ptr, ssize_t l)
{
+ char chunk[20];
- Debug("VER(%d) %d\n", (int)l, (int)(cb-ce));
assert(l > 0);
- if (sp->wrk->res_mode & RES_CHUNKED)
- (void)WRW_Write(sp->wrk, cb, cl);
+ if (sp->wrk->res_mode & RES_CHUNKED) {
+ if (cb == NULL) {
+ bprintf(chunk, "%jx\r\n", l);
+ (void)WRW_Write(sp->wrk, chunk, -1);
+ } else
+ (void)WRW_Write(sp->wrk, cb, cl);
+ }
(void)WRW_Write(sp->wrk, ptr, l);
- if (sp->wrk->res_mode & RES_CHUNKED)
+ if (sp->wrk->res_mode & RES_CHUNKED) {
(void)WRW_Write(sp->wrk, "\r\n", -1);
+ if (cb == NULL)
+ (void)WRW_Flush(sp->wrk);
+ }
}
static ssize_t
@@ -273,8 +281,14 @@ ESI_Deliver(struct sess *sp)
sp->wrk->crc = crc32_combine(
sp->wrk->crc, icrc, l_icrc);
sp->wrk->l_crc += l_icrc;
+if (sp->esi_level > 0 && off == 0) {
+assert(l > 10);
+ ved_sendchunk(sp, NULL, 0,
+ st->ptr + off + 10, l - 10);
+} else {
ved_sendchunk(sp, r, q - r,
st->ptr + off, l);
+}
off += l;
} else {
ved_pretend_gzip(sp,
@@ -342,11 +356,82 @@ void
ESI_DeliverChild(struct sess *sp)
{
struct storage *st;
+ struct object *obj;
+ ssize_t start, last, stop, l, lx, dl;
+ u_char *p;
+ uint32_t icrc;
+ uint32_t ilen;
- if (sp->obj->gziped) {
- INCOMPL();
- } else {
+ if (!sp->obj->gziped) {
VTAILQ_FOREACH(st, &sp->obj->store, list)
ved_pretend_gzip(sp, st->ptr, st->len);
+ return;
}
+ /*
+ * This is the interesting case: Deliver all the deflate
+ * blocks, stripping the "LAST" bit of the last one and
+ * padding it, as necessary, to a byte boundary.
+ */
+ obj = sp->obj;
+ CHECK_OBJ_NOTNULL(obj, OBJECT_MAGIC);
+ start = obj->gzip_start;
+ last = obj->gzip_last;
+ stop = obj->gzip_stop;
+ assert(last >= start);
+ assert(last < stop);
+ assert(start > 0 && start <= obj->len * 8);
+ assert(last > 0 && last <= obj->len * 8);
+ assert(stop > 0 && stop <= obj->len * 8);
+printf("BITS %jd %jd %jd\n", start, last, stop);
+
+ /* The start bit must be byte aligned. */
+ AZ(start & 7);
+
+ lx = 0;
+ VTAILQ_FOREACH(st, &sp->obj->store, list) {
+ p = st->ptr;
+ l = st->len;
+ xxxassert(start/8 < l);
+ if (start/8 > 0) {
+ l -= start/8;
+ p += start/8;
+ lx += start/8;
+ start = 0;
+ }
+ assert(l >= 0);
+ if (l == 0)
+ continue;
+ printf("XXXX: %jd %jd %jd\n", l, lx, last / 8);
+ dl = last/8 - lx;
+ if (dl > 0) {
+ if (dl > l)
+ dl = l;
+printf("CH1 %jd\n", dl);
+ ved_sendchunk(sp, NULL, 0, p, dl);
+ lx += dl;
+ p += dl;
+ l -= dl;
+ }
+ assert(l >= 0);
+ if (l == 0)
+ continue;
+ printf("XXXX: %jd %jd %jd %02x\n", l, lx, last / 8, *p);
+ /*
+ * If we are lucky, the last bit is aligned and in a copy
+ * block, detect and be happy
+ */
+ if (l >= 3 && (last & 7) == 0 &&
+ p[0] == 0x01 && p[1] == 0 && p[2] == 0)
+ break;
+ INCOMPL();
+ }
+ AZ(VTAILQ_NEXT(st, list));
+ assert(st->len > 8);
+ p = st->ptr + st->len - 8;
+ icrc = vle32dec(p);
+ ilen = vle32dec(p + 4);
+printf("CRC %08x LEN %d\n", icrc, ilen);
+ sp->wrk->crc = crc32_combine(sp->wrk->crc, icrc, ilen);
+ sp->wrk->l_crc += ilen;
+
}
diff --git a/bin/varnishtest/tests/e00023.vtc b/bin/varnishtest/tests/e00023.vtc
index 0d4457c..0bb17f8 100644
--- a/bin/varnishtest/tests/e00023.vtc
+++ b/bin/varnishtest/tests/e00023.vtc
@@ -1,6 +1,6 @@
# $Id$
-test "Include an ungzip'ed (ESI) object in a gzip'ed ESI object"
+test "Include gzip'ed and ungzip'ed (ESI) objects in a gzip'ed ESI object"
server s1 {
rxreq
@@ -11,14 +11,30 @@ server s1 {
<esi:include src="/foo"/> 3
Between includes 4
<esi:include src="/bar"/> 5
- After includes 6
+ Between includes 6
+ <esi:include src="/fooz"/> 7
+ Between includes 8
+ <esi:include src="/barz"/> 9
+ After includes 10
}
+
rxreq
expect req.url == "/foo"
txresp -body {<h1>/////<h2>}
+
rxreq
expect req.url == "/bar"
txresp -body {*****}
+
+ rxreq
+ expect req.http.accept-encoding == gzip
+ expect req.url == "/fooz"
+ txresp -gzipbody {<h1>-----<h2>}
+
+ rxreq
+ expect req.http.accept-encoding == gzip
+ expect req.url == "/barz"
+ txresp -gzipbody {#####}
} -start
varnish v1 -vcl+backend {
@@ -38,7 +54,7 @@ client c1 {
expect resp.http.content-encoding == gzip
gunzip
expect resp.status == 200
- expect resp.bodylen == 115
+ expect resp.bodylen == 192
}
client c1 -run
diff --git a/bin/varnishtest/vtc_http.c b/bin/varnishtest/vtc_http.c
index cf34a4e..9c9609d 100644
--- a/bin/varnishtest/vtc_http.c
+++ b/bin/varnishtest/vtc_http.c
@@ -417,8 +417,14 @@ http_swallow_body(struct http *hp, char * const *hh, int body)
}
l = hp->prxbuf;
http_rxchar(hp, 2);
- assert(vct_iscrlf(hp->rxbuf[l]));
- assert(vct_iscrlf(hp->rxbuf[l + 1]));
+ if(!vct_iscrlf(hp->rxbuf[l]))
+ vtc_log(hp->vl, 0,
+ "Wrong chunk tail[0] = %02x",
+ hp->rxbuf[l] & 0xff);
+ if(!vct_iscrlf(hp->rxbuf[l + 1]))
+ vtc_log(hp->vl, 0,
+ "Wrong chunk tail[1] = %02x",
+ hp->rxbuf[l + 1] & 0xff);
hp->prxbuf = l;
hp->rxbuf[l] = '\0';
if (i == 0)
More information about the varnish-commit
mailing list