[Nanobox] Tuning Update (#6660)

Various performance and stability enhancements for instances deployed via Nanobox.
Daniel Hunsaker 2018-03-06 13:59:35 -07:00 committed by Eugen Rochko
parent c717f76652
commit b9a6ca1c09
3 changed files with 110 additions and 16 deletions

@@ -13,11 +13,29 @@ DB_PORT=5432
 DATABASE_URL=postgresql://$DATA_DB_USER:$DATA_DB_PASS@$DATA_DB_HOST/gonano
 
+# Optional ElasticSearch configuration
+# ES_ENABLED=true
+# ES_HOST=localhost
+# ES_PORT=9200
+
+# Optimizations
+LD_PRELOAD=/data/lib/libjemalloc.so
+
+# ImageMagick optimizations
+MAGICK_TEMPORARY_PATH=/app/tmp
+MAGICK_MEMORY_LIMIT=128MiB
+MAGICK_MAP_LIMIT=64MiB
+MAGICK_TIME_LIMIT=15
+MAGICK_AREA_LIMIT=16MP
+MAGICK_WIDTH_LIMIT=8KP
+MAGICK_HEIGHT_LIMIT=8KP
+
 # Federation
-# Note: Changing LOCAL_DOMAIN or LOCAL_HTTPS at a later time will cause unwanted side effects.
+# Note: Changing LOCAL_DOMAIN at a later time will cause unwanted side effects, including breaking all existing federation.
 # LOCAL_DOMAIN should *NOT* contain the protocol part of the domain e.g https://example.com.
 LOCAL_DOMAIN=${APP_NAME}.nanoapp.io
-LOCAL_HTTPS=false
+# Changing LOCAL_HTTPS in production is no longer supported. (Mastodon will always serve https:// links)
 
 # Use this only if you need to run mastodon on a different domain than the one used for federation.
 # You can read more about this option on https://github.com/tootsuite/documentation/blob/master/Running-Mastodon/Serving_a_different_domain.md
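
The MAGICK_* variables cap ImageMagick's memory, disk, pixel-geometry, and processing-time budgets during media processing, and LD_PRELOAD puts the Ruby processes on jemalloc. A quick sanity check from the app context, assuming `identify` (the ImageMagick CLI) is on the PATH inside `nanobox run`:

    # Limits ImageMagick resolved from the MAGICK_* variables above
    nanobox run identify -list resource

    # The library LD_PRELOAD points at should exist inside the container
    nanobox run ls -l /data/lib/libjemalloc.so
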
@@ -31,7 +49,6 @@ LOCAL_HTTPS=false
 # Application secrets
 # Generate each with the `rake secret` task (`nanobox run bundle exec rake secret`)
-PAPERCLIP_SECRET=$PAPERCLIP_SECRET
 SECRET_KEY_BASE=$SECRET_KEY_BASE
 OTP_SECRET=$OTP_SECRET
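
With PAPERCLIP_SECRET dropped, only SECRET_KEY_BASE and OTP_SECRET still need values. As the comment above notes, each can be generated with the `rake secret` task; roughly:

    # Run once per secret and paste the output into the matching variable
    nanobox run bundle exec rake secret   # -> SECRET_KEY_BASE
    nanobox run bundle exec rake secret   # -> OTP_SECRET
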
@@ -131,9 +148,79 @@ SMTP_FROM_ADDRESS=notifications@${APP_NAME}.nanoapp.io
 # Cluster number setting for streaming API server.
 # If you comment out following line, cluster number will be `numOfCpuCores - 1`.
-STREAMING_CLUSTER_NUM=1
+# STREAMING_CLUSTER_NUM=1
 
 # Docker mastodon user
 # If you use Docker, you may want to assign UID/GID manually.
 # UID=1000
 # GID=1000
+
+# LDAP authentication (optional)
+# LDAP_ENABLED=true
+# LDAP_HOST=localhost
+# LDAP_PORT=389
+# LDAP_METHOD=simple_tls
+# LDAP_BASE=
+# LDAP_BIND_DN=
+# LDAP_PASSWORD=
+# LDAP_UID=cn
+
+# PAM authentication (optional)
+# PAM authentication uses for the email generation the "email" pam variable
+# and optional as fallback PAM_DEFAULT_SUFFIX
+# The pam environment variable "email" is provided by:
+# https://github.com/devkral/pam_email_extractor
+# PAM_ENABLED=true
+# Fallback Suffix for email address generation (nil by default)
+# PAM_DEFAULT_SUFFIX=pam
+# Name of the pam service (pam "auth" section is evaluated)
+# PAM_DEFAULT_SERVICE=rpam
+# Name of the pam service used for checking if an user can register (pam "account" section is evaluated) (nil (disabled) by default)
+# PAM_CONTROLLED_SERVICE=rpam
+
+# Global OAuth settings (optional) :
+# If you have only one strategy, you may want to enable this
+# OAUTH_REDIRECT_AT_SIGN_IN=true
+
+# Optional CAS authentication (cf. omniauth-cas) :
+# CAS_ENABLED=true
+# CAS_URL=https://sso.myserver.com/
+# CAS_HOST=sso.myserver.com/
+# CAS_PORT=443
+# CAS_SSL=true
+# CAS_VALIDATE_URL=
+# CAS_CALLBACK_URL=
+# CAS_LOGOUT_URL=
+# CAS_LOGIN_URL=
+# CAS_UID_FIELD='user'
+# CAS_CA_PATH=
+# CAS_DISABLE_SSL_VERIFICATION=false
+# CAS_UID_KEY='user'
+# CAS_NAME_KEY='name'
+# CAS_EMAIL_KEY='email'
+# CAS_NICKNAME_KEY='nickname'
+# CAS_FIRST_NAME_KEY='firstname'
+# CAS_LAST_NAME_KEY='lastname'
+# CAS_LOCATION_KEY='location'
+# CAS_IMAGE_KEY='image'
+# CAS_PHONE_KEY='phone'
+
+# Optional SAML authentication (cf. omniauth-saml)
+# SAML_ENABLED=true
+# SAML_ACS_URL=
+# SAML_ISSUER=http://localhost:3000/auth/auth/saml/callback
+# SAML_IDP_SSO_TARGET_URL=https://idp.testshib.org/idp/profile/SAML2/Redirect/SSO
+# SAML_IDP_CERT=
+# SAML_IDP_CERT_FINGERPRINT=
+# SAML_NAME_IDENTIFIER_FORMAT=
+# SAML_CERT=
+# SAML_PRIVATE_KEY=
+# SAML_SECURITY_WANT_ASSERTION_SIGNED=true
+# SAML_SECURITY_WANT_ASSERTION_ENCRYPTED=true
+# SAML_SECURITY_ASSUME_EMAIL_IS_VERIFIED=true
+# SAML_ATTRIBUTES_STATEMENTS_UID="urn:oid:0.9.2342.19200300.100.1.1"
+# SAML_ATTRIBUTES_STATEMENTS_EMAIL="urn:oid:1.3.6.1.4.1.5923.1.1.1.6"
+# SAML_ATTRIBUTES_STATEMENTS_FULL_NAME="urn:oid:2.5.4.42"
+# SAML_UID_ATTRIBUTE="urn:oid:0.9.2342.19200300.100.1.1"
+# SAML_ATTRIBUTES_STATEMENTS_VERIFIED=
+# SAML_ATTRIBUTES_STATEMENTS_VERIFIED_EMAIL=
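
These new blocks are commented-out templates for the optional authentication backends (LDAP, PAM, CAS, SAML); uncomment only the ones you actually use. Before flipping LDAP_ENABLED, a hedged connectivity probe with the stock OpenLDAP client can confirm the values you plan to set (host, bind DN, and base below are placeholders, not values from this repo):

    # A successful search means the host/port/bind credentials/base are usable
    # (with LDAP_METHOD=simple_tls you would probe ldaps:// on the TLS port instead)
    ldapsearch -x -H ldap://localhost:389 \
      -D "cn=admin,dc=example,dc=org" -w "$LDAP_PASSWORD" \
      -b "dc=example,dc=org" "(cn=*)" dn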

@@ -1,7 +1,7 @@
 run.config:
   engine: ruby
   engine.config:
-    runtime: ruby-2.4
+    runtime: ruby-2.5
   extra_packages:
     # basic servers:
@@ -10,6 +10,7 @@ run.config:
     # for images:
     - ImageMagick
+    - jemalloc
     # for videos:
     - ffmpeg3
@@ -37,7 +38,7 @@ run.config:
     - yarn.lock
   extra_steps:
-    - envsubst < .env.nanobox > .env
+    - cp .env.nanobox .env
     - yarn
   fs_watch: true
@@ -47,7 +48,7 @@ deploy.config:
   extra_steps:
     - NODE_ENV=production bundle exec rake assets:precompile
   transform:
-    - "sed 's/LOCAL_HTTPS=.*/LOCAL_HTTPS=true/i' /app/.env.nanobox | envsubst > /app/.env.production"
+    - "envsubst < /app/.env.nanobox > /app/.env.production"
    - |-
      if [ -z "$LOCAL_DOMAIN" ]
      then
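
The dev-time step now simply copies .env.nanobox into place, while the deploy-time transform still pipes it through envsubst so the `${VARIABLE}` placeholders are resolved against the environment Nanobox provides when writing .env.production. Roughly what that substitution does (the APP_NAME value here is made up):

    export APP_NAME=myapp   # illustrative; Nanobox sets this for a real app
    echo 'LOCAL_DOMAIN=${APP_NAME}.nanoapp.io' | envsubst
    # prints: LOCAL_DOMAIN=myapp.nanoapp.io
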
@@ -186,7 +187,7 @@ worker.cron_only:
 data.db:
-  image: nanobox/postgresql:9.5
+  image: nanobox/postgresql:9.6
   cron:
     - id: backup
@@ -196,11 +197,11 @@ data.db:
         gzip |
         curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).sql.gz --data-binary @- &&
         curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
-        json_pp |
+        sed 's/,/\n/g' |
         grep ${HOSTNAME} |
         sort |
         head -n-${BACKUP_COUNT:-1} |
-        sed 's/.*: "\(.*\)".*/\1/' |
+        sed 's/.*: \?"\(.*\)".*/\1/' |
         while read file
         do
           curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
@@ -208,7 +209,7 @@ data.db:
 data.redis:
-  image: nanobox/redis:3.0
+  image: nanobox/redis:4.0
   cron:
     - id: backup
@@ -216,11 +217,11 @@ data.redis:
       command: |
         curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).rdb --data-binary @/data/var/db/redis/dump.rdb &&
         curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
-        json_pp |
+        sed 's/,/\n/g' |
        grep ${HOSTNAME} |
        sort |
        head -n-${BACKUP_COUNT:-1} |
-        sed 's/.*: "\(.*\)".*/\1/' |
+        sed 's/.*: \?"\(.*\)".*/\1/' |
        while read file
        do
          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
@@ -237,11 +238,11 @@ data.storage:
         tar cz -C /data/var/db/unfs/ . |
         curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/backup-${HOSTNAME}-$(date -u +%Y-%m-%d.%H-%M-%S).tgz --data-binary @- &&
         curl -k -s -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/ |
-        json_pp |
+        sed 's/,/\n/g' |
        grep ${HOSTNAME} |
        sort |
        head -n-${BACKUP_COUNT:-1} |
-        sed 's/.*: "\(.*\)".*/\1/' |
+        sed 's/.*: \?"\(.*\)".*/\1/' |
        while read file
        do
          curl -k -H "X-AUTH-TOKEN: ${WAREHOUSE_DATA_HOARDER_TOKEN}" https://${WAREHOUSE_DATA_HOARDER_HOST}:7410/blobs/${file} -X DELETE
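
All three data components now prune old backups with the same pipeline: the warehouse's JSON blob listing is split into one entry per line with sed (replacing json_pp, which may not be installed in the component image), filtered to this host's backups, sorted so the newest names come last, trimmed to everything except the most recent BACKUP_COUNT entries, and reduced to bare blob names for deletion. The added `\?` in the final sed keeps it working whether or not a space follows the colon. A standalone sketch with a made-up listing, where `grep db` stands in for the real `grep ${HOSTNAME}` filter and the real warehouse response shape may differ:

    BACKUP_COUNT=2
    echo '[{"name":"backup-db-2018-03-01.sql.gz"},{"name":"backup-db-2018-03-02.sql.gz"},{"name":"backup-db-2018-03-03.sql.gz"}]' |
      sed 's/,/\n/g' |
      grep db |
      sort |
      head -n-${BACKUP_COUNT:-1} |
      sed 's/.*: \?"\(.*\)".*/\1/'
    # prints backup-db-2018-03-01.sql.gz, the only blob old enough to delete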

@@ -58,15 +58,21 @@ http {
             proxy_pass_header Server;
             proxy_pass http://rails;
-            proxy_buffering off;
+            proxy_buffering on;
             proxy_redirect off;
             proxy_http_version 1.1;
             proxy_set_header Upgrade $http_upgrade;
             proxy_set_header Connection $connection_upgrade;
+            proxy_cache CACHE;
+            proxy_cache_valid 200 7d;
+            proxy_cache_use_stale error timeout updating http_500 http_502 http_503 http_504;
             tcp_nodelay on;
         }
     }
 
+    proxy_cache_path /data/var/cache/nginx levels=1:2 keys_zone=CACHE:10m inactive=7d max_size=1g;
+
     error_page 500 501 502 503 504 /500.html;
 }
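
With proxy_buffering back on and the CACHE zone defined, nginx can keep successful Rails responses in /data/var/cache/nginx for up to a week and serve stale copies while the backend errors out or is being updated. A rough external check, assuming a deployed instance URL (placeholder below) and a response nginx is actually willing to cache:

    URL=https://myapp.nanoapp.io/   # placeholder instance URL
    # The second fetch should be noticeably faster once the response is cached
    curl -s -o /dev/null -w 'cold: %{time_total}s\n' "$URL"
    curl -s -o /dev/null -w 'warm: %{time_total}s\n' "$URL"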