我在Nginx上成功部署了Varnish 4 + PageSpeed,但是Varnish没有命中缓存,因为这个站点总是会下发一个语言cookie,无论用户是否通过了身份验证。
这是有道理的,因为在访问期间,用户可以改变网站上的语言,并且cookie提供关于正在使用什么语言的信息。
那么,我怎样才能让Varnish把这两种语言的页面分别缓存下来,而不是因为存在cookie就完全跳过缓存、每次都回源?
完整的配置如下:
# Marker to tell the VCL compiler that this VCL has been adapted to the
# new 4.0 format.
vcl 4.0;

import std;

# Block 1: Define upstream server's host and port. Set this to point to your
# content server.
backend default {
  .host = "127.0.0.1";
  .port = "8080";
}

# Block 2: Define a key based on the User-Agent which can be used for hashing.
# Also set the PS-CapabilityList header for PageSpeed server to respect.
sub generate_user_agent_based_key {
  # Define placeholder PS-CapabilityList header values for large and small
  # screens with no UA dependent optimizations. Note that these placeholder
  # values should not contain any of ll, ii, dj, jw or ws, since these
  # codes will end up representing optimizations to be supported for the
  # request.
  set req.http.default_ps_capability_list_for_large_screens = "LargeScreen.SkipUADependentOptimizations:";
  set req.http.default_ps_capability_list_for_small_screens = "TinyScreen.SkipUADependentOptimizations:";

  # As a fallback, the PS-CapabilityList header that is sent to the upstream
  # PageSpeed server should be for a large screen device with no browser
  # specific optimizations.
  set req.http.PS-CapabilityList = req.http.default_ps_capability_list_for_large_screens;

  # Cache-fragment 1: Desktop User-Agents that support lazyload_images (ll),
  # inline_images (ii) and defer_javascript (dj).
  # Note: Wget is added for testing purposes only.
  if (req.http.User-Agent ~ "(?i)Chrome/|Firefox/|MSIE |Safari|Wget") {
    set req.http.PS-CapabilityList = "ll,ii,dj:";
  }

  # Cache-fragment 2: Desktop User-Agents that support lazyload_images (ll),
  # inline_images (ii), defer_javascript (dj), webp (jw) and lossless_webp
  # (ws).
  # FIX: the original pattern contained a stray '[' ("Chrome/[[3-9][0-9]+\.")
  # which put a literal '[' into the character class; "Chrome/[3-9][0-9]+\."
  # matches Chrome 30-99 as intended.
  if (req.http.User-Agent ~ "(?i)Chrome/[2][3-9]+\.|Chrome/[3-9][0-9]+\.|Chrome/[0-9]{3,}\.") {
    set req.http.PS-CapabilityList = "ll,ii,dj,jw,ws:";
  }

  # Cache-fragment 3: This fragment contains (a) Desktop User-Agents that
  # match fragments 1 or 2 but should not because they represent older
  # versions of certain browsers or bots and (b) Tablet User-Agents that
  # on all browsers and use image compression qualities applicable to large
  # screens. Note that even tablets that are capable of supporting inline or
  # webp images, eg Android 4.1.2, will not get these advanced
  # optimizations.
  if (req.http.User-Agent ~ "(?i)Firefox/[1-2]\.|MSIE [5-8]\.|bot|Yahoo!|Ruby|RPT-HTTPClient|(Google \(\+https\:\/\/developers\.google\.com\/\+\/web\/snippet\/\))|Android|iPad|TouchPad|Silk-Accelerated|Kindle Fire") {
    set req.http.PS-CapabilityList = req.http.default_ps_capability_list_for_large_screens;
  }

  # Cache-fragment 4: Mobiles and small screen tablets will use image
  # compression qualities applicable to small screens, but all other
  # optimizations will be those that work on all browsers.
  if (req.http.User-Agent ~ "(?i)Mozilla.*Android.*Mobile*|iPhone|BlackBerry|Opera Mobi|Opera Mini|SymbianOS|UP.Browser|J-PHONE|Profile/MIDP|portalmmm|DoCoMo|Obigo|Galaxy Nexus|GT-I9300|GT-N7100|HTC One|Nexus [4|7|S]|Xoom|XT907") {
    set req.http.PS-CapabilityList = req.http.default_ps_capability_list_for_small_screens;
  }

  # Remove placeholder header values so they are not forwarded upstream.
  unset req.http.default_ps_capability_list_for_large_screens;
  # FIX: the original unset the large-screens placeholder twice and never
  # removed the small-screens placeholder, leaking that header to the backend.
  unset req.http.default_ps_capability_list_for_small_screens;
}

sub vcl_hash {
  # Block 3: Use the PS-CapabilityList value for computing the hash, so each
  # capability class gets its own cache entry.
  hash_data(req.http.PS-CapabilityList);
}

# Block 3a: Define ACL for purge requests
acl purge {
  # Purge requests are only allowed from localhost.
  "localhost";
  "127.0.0.1";
}

# Block 4: In vcl_recv, on receiving a request, call the method responsible for
# generating the User-Agent based key for hashing into the cache.
sub vcl_recv {
  # We want to support beaconing filters, ie, one or more of inline_images,
  # lazyload_images, inline_preview_images or prioritize_critical_css are
  # enabled. We define a placeholder constant called ps_should_beacon_key_value
  # so that some percentages of hits and misses can be sent to the backend
  # with this value used for the PS-ShouldBeacon header to force beaconing.
  # This value should match the value of the DownstreamCacheRebeaconingKey
  # pagespeed directive used by your backend server.
  # WARNING: Do not use "random_rebeaconing_key" for your configuration, but
  # instead change it to something specific to your site, to keep it secure.
  set req.http.ps_should_beacon_key_value = "random_rebeaconing_key";
  # Incoming PS-ShouldBeacon headers should not be allowed since this will allow
  # external entities to force the server to instrument pages.
  unset req.http.PS-ShouldBeacon;

  call generate_user_agent_based_key;

  # Block 3d: Verify the ACL for an incoming purge request and handle it.
  if (req.method == "PURGE") {
    if (!client.ip ~ purge) {
      return (synth(405,"Not allowed."));
    }
    return (purge);
  }

  # Blocks which decide whether cache should be bypassed or not go here.

  # Block 5a: Bypass the cache for .pagespeed. resources. PageSpeed has its own
  # cache for these, and these could bloat up the caching layer.
  # FIX: the original regex used "[az]" (only the literal letters 'a' and 'z')
  # instead of the character range "[a-z]" from the official PageSpeed
  # downstream-caching example, so most rewritten resource URLs were not
  # matched and were cached here anyway.
  if (req.url ~ "\.pagespeed\.([a-z]\.)?[a-z]{2}\.[^.]{10}\.[^.]+") {
    return (pass);
  }

  # Block 5b: Only cache responses to clients that support gzip. Most clients
  # do, and the cache holds much more if it stores gzipped responses.
  if (req.http.Accept-Encoding !~ "gzip") {
    return (pass);
  }

  # --- WordPress specific configuration

  # Do not cache the RSS feed.
  if (req.url ~ "/feed") {
    return (pass);
  }

  # Blitz hack
  if (req.url ~ "/mu-.*") {
    return (pass);
  }

  # Do not cache the admin and login pages.
  if (req.url ~ "/wp-(login|admin)") {
    return (pass);
  }

  # Remove the "has_js" cookie
  set req.http.Cookie = regsuball(req.http.Cookie, "has_js=[^;]+(; )?", "");
  # Remove any Google Analytics based cookies
  set req.http.Cookie = regsuball(req.http.Cookie, "__utm.=[^;]+(; )?", "");
  # Remove the Quant Capital cookies (added by some plugin, all __qca)
  set req.http.Cookie = regsuball(req.http.Cookie, "__qc.=[^;]+(; )?", "");
  # Remove the wp-settings-1 cookie
  set req.http.Cookie = regsuball(req.http.Cookie, "wp-settings-1=[^;]+(; )?", "");
  # Remove the wp-settings-time-1 cookie
  set req.http.Cookie = regsuball(req.http.Cookie, "wp-settings-time-1=[^;]+(; )?", "");
  # Remove the wp test cookie
  set req.http.Cookie = regsuball(req.http.Cookie, "wordpress_test_cookie=[^;]+(; )?", "");

  # Are there cookies left with only spaces or that are empty?
  if (req.http.cookie ~ "^ *$") {
    unset req.http.cookie;
  }

  # Check the cookies for wordpress-specific items; logged-in users and
  # commenters must not be served from the cache.
  if (req.http.Cookie ~ "wordpress_" || req.http.Cookie ~ "comment_") {
    return (pass);
  }
  # NOTE(review): this is effectively a no-op (unsetting an already-absent
  # header); kept for parity with the original configuration.
  if (!req.http.cookie) {
    unset req.http.cookie;
  }

  # Avoid caching WooCommerce areas.
  if (req.url ~ "^/(cart|my-account|checkout|addons)") {
    return (pass);
  }
  if ( req.url ~ "\?add-to-cart=" ) {
    return (pass);
  }
}

# Block 6: Mark HTML uncacheable by caches beyond our control.
sub vcl_backend_response {
  if (beresp.http.Content-Type ~ "text/html") {
    # Hide the upstream cache control header.
    unset beresp.http.Cache-Control;
    # Add no-cache Cache-Control header for html.
    set beresp.http.Cache-Control = "no-cache, max-age=0";
  }
}

sub vcl_hit {
  # Send 5% of the HITs to the backend for instrumentation (rebeaconing).
  if (std.random(0, 100) < 5) {
    set req.http.PS-ShouldBeacon = req.http.ps_should_beacon_key_value;
    return (pass);
  }
}

sub vcl_miss {
  # Send 25% of the MISSes to the backend for instrumentation.
  if (std.random(0, 100) < 25) {
    set req.http.PS-ShouldBeacon = req.http.ps_should_beacon_key_value;
    return (pass);
  }
}

# Block 7: Add a header for identifying cache hits/misses.
sub vcl_deliver {
  if (obj.hits > 0) {
    set resp.http.X-Cache = "HIT";
  } else {
    set resp.http.X-Cache = "MISS";
  }
}
把这个语言cookie加入散列(hash),这样持有不同cookie值的用户就会各自收到对应语言版本的缓存内容。
文档在这里: https://www.varnish-cache.org/docs/trunk/users-guide/vcl-hashing.html
然后覆盖默认的VCL逻辑,这样携带这个cookie就不再意味着响应无法被缓存。