使用 squid 也可以缓存“大”文件（这里“大”指大于 20 MB）。当某台 PC 第一次下载某个文件时，该文件从互联网下载；同一台 PC 第二次下载时则由 squid 缓存提供。但是：如果我从另一台 PC 下载同一个文件，这个文件仍会从互联网重新下载，而不是由 squid 缓存提供。
# --- Object size limits ---------------------------------------------------
# NOTE: maximum_object_size must appear BEFORE cache_dir — cache_dir captures
# the limit in effect at parse time, and inheriting the small default is a
# common reason large downloads are never stored on disk.
# Units normalized to the documented "GB"/"KB" spellings.
maximum_object_size 5 GB
minimum_object_size 0 KB

# --- Disk and memory cache ------------------------------------------------
# 30 GB ufs store with 16 first-level / 256 second-level subdirectories.
cache_dir ufs /data/vmware/squid-cache 30720 16 256
cache_mem 4096 MB
maximum_object_size_in_memory 512 KB
cache_replacement_policy heap GDSF
cache_swap_low 85
cache_swap_high 90

# --- Misc tuning / privacy ------------------------------------------------
half_closed_clients off
hosts_file /etc/hosts
memory_pools off
client_db off
dns_nameservers 127.0.0.1
via off
forwarded_for off
httpd_suppress_version_string off
follow_x_forwarded_for deny all
#visible_hostname sign.bunker.org

# NOTE(review): if large files are still re-fetched for a second client,
# check range_offset_limit / quick_abort_* — partial (Range) downloads are
# not cached by default. TODO: confirm against the clients' download behavior.

# --- Refresh patterns ------------------------------------------------------
# Order matters: refresh_pattern is first-match-wins, so every specific rule
# must come before the "." catch-all. In the original file the movies.com and
# (/cgi-bin/|\?) rules were listed AFTER the catch-all and could never match.
refresh_pattern ^ftp: 1440 20% 10080
refresh_pattern ^gopher: 1440 0% 1440
# Dynamic content: never cache; must precede the catch-all.
refresh_pattern (/cgi-bin/|\?) 0 0% 0
refresh_pattern -i \.(gif|png|jpg|jpeg|ico)$ 10080 90% 43200 override-expire ignore-no-cache ignore-no-store ignore-private
refresh_pattern -i \.(iso|avi|wav|mp3|mp4|mpeg|swf|flv|x-flv)$ 43200 90% 432000 override-expire ignore-no-cache ignore-no-store ignore-private
refresh_pattern -i \.(deb|rpm|exe|zip|tar|tgz|ram|rar|bin|ppt|doc|tiff)$ 10080 90% 43200 override-expire ignore-no-cache ignore-no-store ignore-private
# The original had an unescaped "." after "index" (matched any character);
# escaped here so only a literal dot matches.
refresh_pattern -i \.index\.(html|htm)$ 0 40% 10080
refresh_pattern -i \.(html|htm|css|js)$ 1440 40% 40320
refresh_pattern -i movies.com/.* 10080 90% 43200
# Catch-all: MUST be the last refresh_pattern line.
refresh_pattern . 0 40% 40320