Update config files (#189)

* Update weapon series migration

This update fixes MigrateWeaponSeries from 20250218 so that it can run on an empty database without raising errors (see the sketch after this list of changes).

* Update .gitignore

Hide backups and logs directories, since we’ll be storing these in the project folder. Also hide mise’s .local directory.

* Change NewRelic log directory

Moved from log/ to logs/

* Add rake task for backing up/restoring prod db

* Rubocop fixes

* Fix error where :preview_state didn’t have an attribute

* Add supervisord ini

This uses my local paths, so we should try to abstract that away later.

* Ignore mise.toml
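The MigrateWeaponSeries migration itself is not part of this diff, so the snippet below is only a rough sketch of the guard pattern the first bullet describes. The Weapon model, the series column, and the Rails version tag are assumptions for illustration; the real fix may look different.

class MigrateWeaponSeries < ActiveRecord::Migration[7.1]
  def up
    # Hypothetical guard: skip the backfill entirely when there is nothing to
    # migrate, e.g. when migrations run against a freshly created database.
    return unless Weapon.exists?

    Weapon.where.not(series: nil).find_each do |weapon|
      # ...the actual series remapping from the 20250218 migration goes here...
    end
  end

  def down
    # Data-only backfill; nothing to reverse.
  end
end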
Justin Edmund 2025-02-27 23:13:57 -08:00 committed by GitHub
parent b2d2952b35
commit 4d3c1a800b
6 changed files with 76 additions and 10 deletions

.gitignore

@@ -37,9 +37,12 @@
 # Ignore master key for decrypting credentials and more.
 /config/master.key

-# Ignore exported and downloaded files
+# Ignore specific directories
+/.local
 /export
 /download
+/backups
+/logs

 .DS_Store

@@ -55,3 +58,4 @@ config/application.yml

 # Ignore AI Codebase-generated files
 codebase.md
+mise.toml


@@ -78,6 +78,7 @@ class Party < ApplicationRecord
   include GranblueEnums

   # Define preview_state as an enum.
+  attribute :preview_state, :integer
   enum :preview_state, { pending: 0, queued: 1, in_progress: 2, generated: 3, failed: 4 }

   # ActiveRecord Associations
@@ -400,7 +401,7 @@ class Party < ApplicationRecord
   # @return [void]
   def skills_are_unique
     validate_uniqueness_of_associations([skill0, skill1, skill2, skill3],
-                                        [:skill0, :skill1, :skill2, :skill3],
+                                        %i[skill0 skill1 skill2 skill3],
                                         :job_skills)
   end

@@ -410,7 +411,7 @@ class Party < ApplicationRecord
   # @return [void]
   def guidebooks_are_unique
     validate_uniqueness_of_associations([guidebook1, guidebook2, guidebook3],
-                                        [:guidebook1, :guidebook2, :guidebook3],
+                                        %i[guidebook1 guidebook2 guidebook3],
                                         :guidebooks)
   end

@@ -438,7 +439,7 @@ class Party < ApplicationRecord
   def update_element!
     main_weapon = weapons.detect { |gw| gw.position.to_i == -1 }
     new_element = main_weapon&.weapon&.element
-    update_column(:element, new_element) if new_element.present? && self.element != new_element
+    update_column(:element, new_element) if new_element.present? && element != new_element
   end

   ##
@@ -449,7 +450,7 @@ class Party < ApplicationRecord
   # @return [void]
   def update_extra!
     new_extra = weapons.any? { |gw| GridWeapon::EXTRA_POSITIONS.include?(gw.position.to_i) }
-    update_column(:extra, new_extra) if self.extra != new_extra
+    update_column(:extra, new_extra) if extra != new_extra
   end

   ##


@@ -25,6 +25,8 @@ common: &default_settings
   # agent_enabled: false

+  log_file_path: logs/
+
   # Logging level for log/newrelic_agent.log
   log_level: info


@@ -4,20 +4,20 @@
 # the maximum value specified for Puma. Default is set to 5 threads for minimum
 # and maximum; this matches the default thread size of Active Record.
 #
-max_threads_count = ENV.fetch("RAILS_MAX_THREADS") { 5 }
-min_threads_count = ENV.fetch("RAILS_MIN_THREADS") { max_threads_count }
+max_threads_count = ENV.fetch('RAILS_MAX_THREADS') { 5 }
+min_threads_count = ENV.fetch('RAILS_MIN_THREADS') { max_threads_count }
 threads min_threads_count, max_threads_count

 # Specifies the `port` that Puma will listen on to receive requests; default is 3000.
 #
-port ENV.fetch("PORT") { 3000 }
+port ENV.fetch('PORT', 3000)

 # Specifies the `environment` that Puma will run in.
 #
-environment ENV.fetch("RAILS_ENV") { "development" }
+environment ENV.fetch('RAILS_ENV') { 'development' }

 # Specifies the `pidfile` that Puma will use.
-pidfile ENV.fetch("PIDFILE") { "tmp/pids/server.pid" }
+pidfile ENV.fetch('PIDFILE') { 'tmp/pids/server.pid' }

 # Specifies the number of `workers` to boot in clustered mode.
 # Workers are forked web server processes. If using threads and workers together
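One of the Puma changes above replaces the block form ENV.fetch("PORT") { 3000 } with the two-argument form ENV.fetch('PORT', 3000). Both fall back to 3000 when PORT is unset; the block form just evaluates its default lazily, which only matters when computing the default is expensive. A standalone Ruby illustration, not from the repo:

ENV.delete('PORT')
ENV.fetch('PORT', 3000)    # => 3000, default evaluated eagerly
ENV.fetch('PORT') { 3000 } # => 3000, block runs only when the key is missing
ENV.fetch('PORT')          # => raises KeyError, unlike ENV['PORT'], which returns nil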

lib/tasks/database.rake (new file)

@@ -0,0 +1,42 @@
namespace :db do
  desc 'Backup remote PostgreSQL database'
  task :backup do
    remote_host = ENV.fetch('REMOTE_DB_HOST', 'roundhouse.proxy.rlwy.net')
    remote_port = ENV.fetch('REMOTE_DB_PORT', '54629')
    remote_user = ENV.fetch('REMOTE_DB_USER', 'postgres')
    remote_db = ENV.fetch('REMOTE_DB_NAME', 'railway')
    password = ENV.fetch('REMOTE_DB_PASSWORD') { raise 'Please set REMOTE_DB_PASSWORD' }

    backup_dir = File.expand_path('backups')
    FileUtils.mkdir_p(backup_dir)
    backup_file = File.join(backup_dir, "#{Time.now.strftime('%Y%m%d_%H%M%S')}-prod-backup.tar")

    cmd = %W[
      pg_dump -h #{remote_host} -p #{remote_port} -U #{remote_user} -d #{remote_db} -F t
      --no-owner --exclude-extension=timescaledb --exclude-extension=timescaledb_toolkit
    ].join(' ')

    puts "Backing up remote database to #{backup_file}..."
    system({ 'PGPASSWORD' => password }, "#{cmd} > #{backup_file}")
    puts 'Backup completed!'
  end

  desc 'Restore PostgreSQL database from backup'
  task :restore, [:backup_file] => [:environment] do |_, args|
    local_user = ENV.fetch('LOCAL_DB_USER', 'justin')
    local_db = ENV.fetch('LOCAL_DB_NAME', 'hensei_dev')

    # Use the specified backup file or find the most recent one
    backup_dir = File.expand_path('backups')
    backup_file = args[:backup_file] || Dir.glob("#{backup_dir}/*-prod-backup.tar").max
    raise 'Backup file not found. Please specify a valid backup file.' unless backup_file && File.exist?(backup_file)

    puts "Restoring database from #{backup_file}..."
    system("pg_restore --no-owner --role=#{local_user} --disable-triggers -U #{local_user} -d #{local_db} #{backup_file}")
    puts 'Restore completed!'
  end

  desc 'Backup remote database and restore locally'
  task backup_and_restore: %i[backup restore]
end
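Assuming these tasks are loaded from lib/tasks by the app's standard Rakefile, usage would presumably be REMOTE_DB_PASSWORD=... bundle exec rake db:backup to dump production into backups/, then bundle exec rake "db:restore[backups/<timestamp>-prod-backup.tar]" to load a specific dump (or bare db:restore to pick the newest one), and db:backup_and_restore to chain both. These invocations are inferred from the task definitions above, not verified against the repo's tooling.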


@@ -0,0 +1,17 @@
[program:hensei-api]
command=/opt/homebrew/bin/mise run s
process_name=%(program_name)s
numprocs=1
directory=/Users/justin/Developer/Granblue/hensei-api
environment=HOME="/Users/justin",MISE_CONFIG_ROOT="/Users/justin/Developer/Granblue/hensei-api",RAILS_ENV="development"
autostart=true
autorestart=unexpected
stopsignal=TERM
user=justin
stdout_logfile=/Users/justin/Developer/Granblue/hensei-api/logs/hensei-api.stdout.log
stdout_logfile_maxbytes=500KB
stdout_logfile_backups=10
stderr_logfile=/Users/justin/Developer/Granblue/hensei-api/logs/hensei-api.stderr.log
stderr_logfile_maxbytes=500KB
stderr_logfile_backups=10
serverurl=AUTO
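On the "uses my local paths" note in the commit message: supervisord can expand environment variables in many program settings with the %(ENV_X)s syntax, so the hard-coded /Users/justin paths could presumably become something like directory=%(ENV_HENSEI_API_ROOT)s once that variable is exported to the supervisord process. HENSEI_API_ROOT is a hypothetical name used only for illustration.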