#user nobody;
worker_processes 1;
error_log /var/log/nginx/error.log;
#pid logs/nginx.pid;
events {
worker_connections 1024;
}
http {
include mime.types;
default_type application/octet-stream;
# CACHING OF API RESPONSES
# Set up caching for the API server responses.
# proxy_cache_path specifies the root directory of the cache, the directory depth (levels),
# the max_size of the cache and the inactive eviction time.
# Most importantly, keys_zone names the shared memory zone that holds the cache keys.
# When nginx receives a request it computes an MD5 hash of the cache key and uses it to look up
# the corresponding file on disk. If no cached file is found, the request is passed on to the API server.
proxy_cache_path /var/cache/nginx/api levels=1:2 keys_zone=nrapi:8m max_size=3000m inactive=600m;
proxy_cache_path /var/cache/nginx/reportcard levels=1:2 keys_zone=nrreportcard:8m max_size=3000m inactive=600m;
proxy_temp_path /var/tmp;
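# With levels=1:2, a response whose cache key hashes to e.g. b7f54b2df7773722d382f4809d65029c
# (hash illustrative) would be stored as /var/cache/nginx/api/c/29/b7f54b2df7773722d382f4809d65029c:
# the first-level directory is the last character of the hash, the second level the next two characters.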
#log_format main '$remote_addr - $remote_user [$time_local] "$request" '
# '$status $body_bytes_sent "$http_referer" '
# '"$http_user_agent" "$http_x_forwarded_for"';
log_format nrformat '$remote_addr $http_x_username $http_x_request_id [$time_local] '
'"$request" $status $body_bytes_sent '
'"$http_referer" "$http_user_agent" '
'$request_time '
'$upstream_response_time';
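# A log entry produced by this format looks roughly like (all values illustrative):
# 10.0.0.1 jdoe req-42 [10/Feb/2016:12:00:01 +0000] "GET /api/v1/lessons HTTP/1.1" 200 1024 "-" "Mozilla/5.0" 0.015 0.012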
access_log /var/log/nginx/access.log nrformat;
#access_log logs/access.log main;
#buffer optimizations
client_body_buffer_size 10K;
client_header_buffer_size 1k;
client_max_body_size 50m;
large_client_header_buffers 2 8k;
#timeout optimizations
client_body_timeout 12;
client_header_timeout 12;
keepalive_timeout 15;
send_timeout 10;
sendfile on;
#tcp_nopush on;
#GZIP REQUESTS
gzip on;
gzip_comp_level 6;
gzip_vary on;
gzip_min_length 1000;
gzip_proxied any;
gzip_types text/plain text/css application/json application/x-javascript text/xml application/xml application/xml+rss text/javascript;
gzip_buffers 16 8k;
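# Compression can be verified from the command line, e.g. (asset path illustrative):
# curl -sI -H "Accept-Encoding: gzip" http://localhost/static/app/modules/config.js
# and checking for "Content-Encoding: gzip", provided the asset is larger than gzip_min_length.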
# LOAD BALANCING FOR API SERVICES
# load balancing for the API services
upstream ona_api_services {
#server 1
server 127.0.0.1:8085;
#maximum number of idle keepalive connections to the API service kept open per worker process
keepalive 64;
}
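# Note: the upstream keepalive cache only takes effect when proxied requests use HTTP/1.1
# with an empty Connection header, which is set in the /api/ location below
# (proxy_http_version 1.1; proxy_set_header Connection "";).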
server {
listen 80;
server_name localhost;
set $APP_ROOT /home/ubuntu/project/client;
root $APP_ROOT;
# REDIRECT HTTP TRAFFIC TO HTTPS
# rewrite ^ https://$host$request_uri? permanent;
#charset koi8-r;
#access_log logs/host.access.log main;
#server-side URL rewrite for fingerprinting of config.js
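# e.g. a fingerprinted request such as /static/app/modules/config.3f2a9c.js (hash illustrative)
# is rewritten to /app/modules/config.js under the application root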
location /static/app/modules/config {
expires max;
rewrite ^(.*)config(.*)\.js$ /app/modules/config.js break;
}
#server-side URL rewrite for fingerprinting of nr-combined-style.css
location /static/app/resources/css/nr-combined-style {
expires max;
rewrite ^(.*)nr-combined-style(.*)\.css$ /app/resources/css/nr-combined-style.css break;
}
#STATIC CACHING for all resources - css/js/image files
location /static {
autoindex on;
access_log off;
alias $APP_ROOT;
expires max;
}
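# e.g. a request for /static/app/resources/images/logo.png (path illustrative) is served from
# $APP_ROOT/app/resources/images/logo.png via the alias above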
# Configuration to get the total number of active connections
location /nginx_status {
# Turn on stats
stub_status on;
access_log off;
# only allow access from localhost
allow 127.0.0.1;
deny all;
}
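# The status endpoint above can be queried locally, e.g. curl http://127.0.0.1/nginx_status;
# it reports active connections plus accepts/handled/requests counters and
# Reading/Writing/Waiting connection states.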
# PROXY SETUP FOR ONA API SERVICES
location /api/ {
# CACHING OF API RESPONSES
# This instructs nginx to use the nrapi keys_zone defined above to cache responses for these requests.
# MD5 hashes are computed from the proxy_cache_key below.
proxy_cache nrapi;
proxy_cache_key sfs$request_uri$scheme;
proxy_cache_methods GET HEAD;
add_header X-Cache-Status $upstream_cache_status;
proxy_cache_valid any 24h;
# SETUP OF NO CACHE FOR SELECTED API ENDPOINTS
set $no_cache "";
if ($uri ~ "/api/v1/lessons/subject/*") {
set $no_cache "1";
}
# When the flag is set, neither store the response (proxy_no_cache) nor serve it from the cache (proxy_cache_bypass)
proxy_no_cache $no_cache;
proxy_cache_bypass $no_cache;
proxy_redirect off;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header Host $http_host;
proxy_set_header X-NginX-Proxy true;
proxy_set_header Connection "";
proxy_http_version 1.1;
proxy_pass http://ona_api_services;
}
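# Cache behaviour of the /api/ proxy above can be checked with e.g.
# curl -sI http://localhost/api/v1/lessons (endpoint illustrative) and inspecting the
# X-Cache-Status response header (MISS on the first request, HIT once cached,
# BYPASS for the excluded lessons-by-subject URLs).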
# CHECK FOR NETWORK ERRORS ON THE CLIENT
# DON'T CACHE THIS URL
location /ping {
autoindex on;
access_log off;
rewrite ^(.+)$ /app/templates/common/ping.json break;
expires 0;
}
# DEFAULT MAPPING FOR THE SINGLE PAGE APPLICATION
# DON'T CACHE INDEX.HTML
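# e.g. a deep link such as /lessons/subject/math (path illustrative) is answered with
# /app/index.html and routing is handled by the client-side Backbone router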
location / {
autoindex on;
expires 0;
rewrite ^(.+)$ /app/index.html break;
}
}
}

I am working on a web application built with require.js and Backbone.js and running behind an Nginx server (1.4.6). The application calls a REST API (running on a Jetty server) through a Backbone Model. When hitting one particular URL, Nginx responds with a 502 Bad Gateway and then gets stuck. For example, the REST API URL is 'http://window.protocol/api/user/{userId}', and it works fine for every userId except one, userId=amit123. When I check the logs on the Jetty server (the REST API), there is no request for 'http://window.protocol/api/user/amit123' coming from the client side, while the Nginx log shows a 502 Bad Gateway. I have attached the nginx.conf file above.