#!/usr/bin/perl

# (C) Sergey Kandaurov
# (C) Nginx, Inc.

# Tests for HTTP/2 protocol with cache.

###############################################################################

use warnings;
use strict;

use Test::More;

BEGIN { use FindBin; chdir($FindBin::Bin); }

use lib 'lib';
use Test::Nginx;
use Test::Nginx::HTTP2;

###############################################################################

select STDERR; $| = 1;
select STDOUT; $| = 1;

my $t = Test::Nginx->new()->has(qw/http http_v2 proxy cache rewrite/)->plan(12)
	->write_file_expand('nginx.conf', <<'EOF');

%%TEST_GLOBALS%%

daemon off;

events {
}

http {
    %%TEST_GLOBALS_HTTP%%

    proxy_cache_path %%TESTDIR%%/cache    keys_zone=NAME:1m;

        # make unfixed nginx quit in a timely manner on different Linux systems
    http2_idle_timeout 2s;
    http2_recv_timeout 2s;

    server {
        listen       127.0.0.1:8080 http2;
        listen       127.0.0.1:8081;
        server_name  localhost;

        location /cache {
            proxy_pass http://127.0.0.1:8081/;
            proxy_cache NAME;
            proxy_cache_valid 1m;
        }
        location /proxy_buffering_off {
            proxy_pass http://127.0.0.1:8081/;
            proxy_cache NAME;
            proxy_cache_valid 1m;
            proxy_buffering off;
        }
        location / {
            if ($arg_slow) {
                set $limit_rate 200;
            }
        }
    }
}

EOF

$t->write_file('t.html', 'SEE-THIS');
$t->run();

###############################################################################

# simple proxy cache test

my $s = Test::Nginx::HTTP2->new();
my $sid = $s->new_stream({ path => '/cache/t.html' });
my $frames = $s->read(all => [{ sid => $sid, fin => 1 }]);

my ($frame) = grep { $_->{type} eq "HEADERS" } @$frames;
is($frame->{headers}->{':status'}, '200', 'proxy cache');

my $etag = $frame->{headers}->{'etag'};

($frame) = grep { $_->{type} eq "DATA" } @$frames;
is($frame->{length}, length 'SEE-THIS', 'proxy cache - DATA');
is($frame->{data}, 'SEE-THIS', 'proxy cache - DATA payload');
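
# change the file on disk, then revalidate the cached response;
# If-None-Match with the saved ETag is expected to return 304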

$t->write_file('t.html', 'NOOP');

$sid = $s->new_stream({ headers => [
	{ name => ':method', value => 'GET', mode => 0 },
	{ name => ':scheme', value => 'http', mode => 0 },
	{ name => ':path', value => '/cache/t.html' },
	{ name => ':authority', value => 'localhost', mode => 1 },
	{ name => 'if-none-match', value => $etag }]});
$frames = $s->read(all => [{ sid => $sid, fin => 1 }]);

($frame) = grep { $_->{type} eq "HEADERS" } @$frames;
is($frame->{headers}->{':status'}, 304, 'proxy cache conditional');

$t->write_file('t.html', 'SEE-THIS');

# request body with cached response
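# (HEADERS is sent without END_STREAM, the body follows in a DATA frame)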

$sid = $s->new_stream({ path => '/cache/t.html', body_more => 1 });
$s->h2_body('TEST');
$frames = $s->read(all => [{ sid => $sid, fin => 1 }]);

($frame) = grep { $_->{type} eq "HEADERS" } @$frames;
is($frame->{headers}->{':status'}, 200, 'proxy cache - request body');
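
# the PING should be acknowledged, confirming the connection is still usable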

$s->h2_ping('SEE-THIS');
$frames = $s->read(all => [{ type => 'PING' }]);

($frame) = grep { $_->{type} eq "PING" && $_->{flags} & 0x1 } @$frames;
ok($frame, 'proxy cache - request body - next');

# HEADERS could be received with fin, followed by DATA

$s = Test::Nginx::HTTP2->new();
$sid = $s->new_stream({ path => '/cache/t.html?1', method => 'HEAD' });
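
# read the response, then do a short extra read to catch a stray DATA frame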

$frames = $s->read(all => [{ sid => $sid, fin => 1 }], wait => 0.2);
push @$frames, $_ for @{$s->read(all => [{ sid => $sid }], wait => 0.2)};
ok(!grep ({ $_->{type} eq "DATA" } @$frames), 'proxy cache HEAD - no body');

# proxy cache - expect no stray empty DATA frame

TODO: {
local $TODO = 'not yet';

$s = Test::Nginx::HTTP2->new();
$sid = $s->new_stream({ path => '/cache/t.html?2' });

$frames = $s->read(all => [{ sid => $sid, fin => 1 }]);
my @data = grep ({ $_->{type} eq "DATA" } @$frames);
is(@data, 1, 'proxy cache write - data frames');
is(join(' ', map { $_->{data} } @data), 'SEE-THIS', 'proxy cache write - data');
is(join(' ', map { $_->{flags} } @data), '1', 'proxy cache write - flags');

}

# HEAD on empty cache with proxy_buffering off

$s = Test::Nginx::HTTP2->new();
$sid = $s->new_stream(
	{ path => '/proxy_buffering_off/t.html?1', method => 'HEAD' });

$frames = $s->read(all => [{ sid => $sid, fin => 1 }]);
push @$frames, $_ for @{$s->read(all => [{ sid => $sid }], wait => 0.2)};
ok(!grep ({ $_->{type} eq "DATA" } @$frames),
	'proxy cache HEAD buffering off - no body');

# client cancels stream with a cacheable request that was sent to upstream
# HEADERS should not be produced for the canceled stream

$s = Test::Nginx::HTTP2->new();
$sid = $s->new_stream({ path => '/cache/t.html?slow=1' });
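
# cancel the stream with RST_STREAM, error code CANCEL (0x8)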

$s->h2_rst($sid, 8);

$frames = $s->read(all => [{ sid => $sid, fin => 0x4 }], wait => 1.2);
ok(!(grep { $_->{type} eq "HEADERS" } @$frames), 'no headers');

# client closes connection after sending a cacheable request, producing an alert

$s = Test::Nginx::HTTP2->new();
$sid = $s->new_stream({ path => '/cache/t.html?4' });

undef $s;
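# sub-second sleep (select with timeout) before stopping nginx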
select undef, undef, undef, 0.2;

$t->stop();

###############################################################################