Forum Moderators: Robert Charlton & goodroi
server {
    listen 80;
    root /var/www;

    # Flag requests coming from MJ12bot.
    # NOTE: the original had a typo here ("set $mg12 '0';"), so the $mj12
    # variable tested below was never initialized to '0'.
    set $mj12 '0';
    if ($http_user_agent ~ 'MJ12bot') {
        set $mj12 '1';
    }

    location = /robots.txt {
        # Serve a bot-specific robots file to MJ12bot.
        # The original used "try_files /robots-mj12.txt;", which is invalid
        # twice over: try_files requires at least two parameters (file +
        # fallback), and try_files is not permitted inside an "if" block.
        # "rewrite ... break;" is allowed here and serves the rewritten URI
        # from the root defined above.
        if ($mj12 = '1') {
            rewrite ^ /robots-mj12.txt break;
        }
    }
}
I don't think olly needs a unique robots.txt file served to MJ12bot; rather, he needs to handle MJ12bot's HTTP request differently.
That allows them to open a tiny HTTP hole for the broken MJ12bot.
Can't you do that without internally rewriting to a unique filename?
What olly really needs is to exclude MJ12bot's robots.txt requests from the 301 redirect from HTTP to HTTPS.
[edited by: engine at 8:50 am (utc) on Mar 6, 2015]
[edit reason] fixed typo at poster request [/edit]
http {
    include /etc/nginx/mime.types;
    default_type application/octet-stream;

    # Default is 1024; Digital Ocean suggests 2048
    types_hash_max_size 2048;

    # Increase the default request-body limit from 1m to 100m to
    # account for large documents
    client_max_body_size 100m;

    #log_format main '$remote_addr - $remote_user [$time_local] "$request" '
    #                '$status $body_bytes_sent "$http_referer" '
    #                '"$http_user_agent" "$http_x_forwarded_for"';
    #access_log logs/access.log main;

    sendfile on;
    #tcp_nopush on;

    #keepalive_timeout 0;
    keepalive_timeout 65;

    # Compress responses to reduce the data sent over the network
    gzip on;
    gzip_min_length 10240;
    gzip_proxied expired no-cache no-store private auth;
    gzip_types text/plain text/css text/xml text/javascript application/x-javascript application/xml;
    gzip_disable "MSIE [1-6]\.";

    # Don't display the server version (security)
    server_tokens off;

    # Request-body read timeout -- default is 60
    client_body_timeout 10;

    # Define your "upstream" servers - the
    # servers requests will be sent to
    upstream app_example {
        least_conn;              # Use Least Connections strategy
        server 127.0.0.1:3000;   # NodeJS Server 1
        # server 127.0.0.1:9001; # NodeJS Server 2
        # server 127.0.0.1:9002; # NodeJS Server 3
    }

    # Redirect every plain-HTTP request to the same URL over HTTPS.
    # (The original line was mangled by the forum software; this is the
    # intended 301-to-https form.)
    server {
        listen 80;
        server_name www.example.com;
        return 301 https://$server_name$request_uri;
    }

    # Define the Nginx HTTPS server
    # This will proxy any non-static request to the Node.js upstream
    server {
        # "ssl on;" is deprecated; enable SSL on the listen directive instead
        listen 443 ssl;
        server_name localhost;

        access_log /var/log/nginx/example.com-access.log;
        error_log /var/log/nginx/example.com-error.log error;

        ## Google PageSpeed Configuration ##
        # DISABLED UNTIL NEEDED
        # pagespeed on;
        # pagespeed ForceCaching on;
        # pagespeed FileCachePath /var/cache/pagespeed;
        # Ensure requests for pagespeed optimized resources go to the pagespeed handler
        # and no extraneous headers get set.
        # location ~ "\.pagespeed\.([a-z]\.)?[a-z]{2}\.[^.]{10}\.[^.]+" {
        #     add_header "" "";
        # }
        # location ~ "^/pagespeed_static/" { }
        # location ~ "^/ngx_pagespeed_beacon$" { }

        ## SSL Configuration ##
        ssl_certificate /etc/nginx/ssl/example-bundle.pem;          # example-digicert-v2.pem + DigiCertSHA2SecureServerCA.pem
        ssl_certificate_key /etc/nginx/ssl/example-digicert-v2.key; # private key; no password
        ssl_trusted_certificate /etc/nginx/ssl/oscp.pem;            # Contains DigiCertSHA2SecureServerCA.pem only

        # Strictest ciphers. Disabled for now
        # ssl_ciphers 'AES128+EECDH:AES128+EDH:!aNULL';
        # Ciphers for backwards compatibility.
        # NOTE: the cipher list must be a single colon-separated string; the
        # line breaks inserted by the forum edit would corrupt the value.
        ssl_ciphers "ECDHE-RSA-AES256-GCM-SHA384:ECDHE-RSA-AES128-GCM-SHA256:DHE-RSA-AES256-GCM-SHA384:DHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-AES256-SHA384:ECDHE-RSA-AES128-SHA256:ECDHE-RSA-AES256-SHA:ECDHE-RSA-AES128-SHA:DHE-RSA-AES256-SHA256:DHE-RSA-AES128-SHA256:DHE-RSA-AES256-SHA:DHE-RSA-AES128-SHA:ECDHE-RSA-DES-CBC3-SHA:EDH-RSA-DES-CBC3-SHA:AES256-GCM-SHA384:AES128-GCM-SHA256:AES256-SHA256:AES128-SHA256:AES256-SHA:AES128-SHA:DES-CBC3-SHA:HIGH:!aNULL:!eNULL:!EXPORT:!DES:!MD5:!PSK:!RC4";

        # Disable SSLv2 and SSLv3 - considered insecure
        ssl_protocols TLSv1 TLSv1.1 TLSv1.2;
        ssl_session_cache shared:SSL:10m;

        # Enables server-side protection from BEAST attacks
        # http://blog.ivanristic.com/2013/09/is-beast-still-a-threat.html
        ssl_prefer_server_ciphers on;
        ssl_dhparam /etc/nginx/ssl/dhparam.pem;

        # OCSP Stapling
        # As per discussion here [raymii.org...]
        ssl_stapling on;
        ssl_stapling_verify on;
        resolver 8.8.8.8 8.8.4.4 valid=300s;
        resolver_timeout 5s;

        # Set HSTS for two years
        add_header Strict-Transport-Security max-age=63072000;

        # Prevent loading in a frame to deny clickjacking attempts
        # [developer.mozilla.org...]
        add_header X-Frame-Options DENY;

        # Browsers and robots always look for these
        # Turn off logging for them
        #location = /favicon.ico { log_not_found off; access_log off; }
        #location = /robots.txt { log_not_found off; access_log off; }

        # Handle static files so they are not proxied to NodeJS.
        # Dots in the filenames are escaped so e.g. "robots.txt" does not
        # accidentally match "robotsXtxt".
        # You may want to also hand these requests to other upstream
        # servers, as you can define more than one!
        location ~ ^/(images/|img/|javascript/|js/|css/|stylesheets/|flash/|media/|static/|robots\.txt|humans\.txt|favicon\.ico) {
            root /var/www/example-web; # note this contains the /src/dist folder in the web root.
            expires 365d;
        }

        # Pass the request to the node.js server
        # with some correct headers for proxy-awareness
        location / {
            proxy_set_header X-Real-IP $remote_addr;
            proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
            proxy_set_header Host $http_host;
            proxy_set_header X-NginX-Proxy true;
            # Use the app_example upstream defined above (same backend as the
            # original mangled "http ://localhost:3000/", but extra backends
            # can be enabled without touching this line).
            proxy_pass http://app_example;
            proxy_redirect off;

            # Handle Web Socket connections
            proxy_http_version 1.1;
            proxy_set_header Upgrade $http_upgrade;
            proxy_set_header Connection "upgrade";
        }
    }
}
[edited by: phranque at 9:46 am (utc) on Mar 6, 2015]
[edit reason] inserted line breaks [/edit]
I don't have much nginx experience, but I would assume an internal rewrite (try_files in this specific case) would only work on the same machine.
# Redirect every plain-HTTP request to the same URL over HTTPS.
# (The return line was mangled by the forum software; this is the
# intended 301-to-https form.)
server {
    listen 80;
    server_name www.example.com;
    return 301 https://$server_name$request_uri;
}
OK, so with the context of the original question, what's wanted is:
100% HTTPS — every HTTP request is answered with a 301 to the same URL, but as HTTPS.
Just set up the verification file and put in a support ticket. They can manually authorize it via Majestic support. :)