* Add table for data version and migrate * Modify migration and re-migrate * Create data_version.rb Adds a model for DataVersion * Add aws-sdk-s3 and create aws_service.rb AwsService handles streaming game image files from the Granblue Fantasy server to our S3 instance. * Add importers The Importer libraries take CSV data and import them into the database for each type. We currently support characters, summons and weapons. * Add downloaders Downloaders take Granblue IDs and download images for those items from the Granblue Fantasy server in all relevant sizes. Downloaders can download to disk or stream the file directly to S3. * Create data_importer.rb * Fetches a list of all CSV files present in the updates folder * Checks which have already been imported * Sends unimported data to the appropriate Importer to handle * Create download_manager.rb Creates an appropriate downloader for each Granblue ID it receives * Update download_images.rake Most of this task has been extracted into the Downloader libraries * Update import_data.rake * Create deploy.rake This task is to be run as a post-deploy script. It checks for new unimported data, imports it, then downloads the relevant images to S3 or local disk depending on the parameters provided. * Update credentials.yml.enc * Began working on a README and added example CSVs * Modify importer to handle updates This way we can also add FLBs and other uncaps more easily. * Updates only require values that will change When updating a row, fields that don't have a provided value will not be changed * Rebuild search indices in post deploy * Clean up logs with LoggingHelper * More logging adjustments Trying to get a nice-looking output * Change some ASCII characters * Final ASCII changes * Fix issues with Summon and Weapon importers * Finish README for contributing
139 lines
3.9 KiB
Ruby
139 lines
3.9 KiB
Ruby
# frozen_string_literal: true
|
|
|
|
module Granblue
  module Importers
    # Abstract base class for CSV importers.
    #
    # Subclasses provide the target model (+model_class+) and the mapping
    # from a CSV row to model attributes (+build_attributes+); this class
    # handles CSV iteration, create-vs-update logic, test-mode dry runs,
    # and tracking of which granblue_ids were created or updated.
    class BaseImporter
      # Hashes of granblue_ids keyed by lowercased model name, collected
      # while #import runs.
      attr_reader :new_records, :updated_records

      # @param file_path [String] path to the CSV file to import
      # @param test_mode [Boolean] when true, log intended changes without persisting
      # @param verbose [Boolean] when true, print each created/updated record
      # @param logger [Object, nil] optional logger responding to #log_operation
      def initialize(file_path, test_mode: false, verbose: false, logger: nil)
        @file_path = file_path
        @test_mode = test_mode
        @verbose = verbose
        @logger = logger
        # Block form gives each key its own array; Hash.new([]) would make
        # every key share a single default array.
        @new_records = Hash.new { |h, k| h[k] = [] }
        @updated_records = Hash.new { |h, k| h[k] = [] }
      end

      # Imports every row of the CSV file (expects a header row).
      #
      # @return [Hash] { new: <ids by type>, updated: <ids by type> }
      def import
        CSV.foreach(@file_path, headers: true) do |row|
          import_row(row)
        end
        { new: @new_records, updated: @updated_records }
      end

      private

      # Builds attributes for a single row and creates/updates the record.
      # Nil values are kept here; find_or_create_record strips them for
      # updates but keeps them for new records to ensure proper defaults.
      def import_row(row)
        attributes = build_attributes(row)
        record = find_or_create_record(attributes)
        track_record(record) if record
      end

      # Creates or updates the record identified by attributes[:granblue_id].
      #
      # @return [Array(Object, Boolean), nil] the record plus an updated?
      #   flag, or nil in test mode (nothing is persisted).
      def find_or_create_record(attributes)
        existing_record = model_class.find_by(granblue_id: attributes[:granblue_id])

        if existing_record
          if @test_mode
            log_test_update(existing_record, attributes)
            nil
          else
            # For updates, only include non-nil attributes so columns
            # without a provided value are left untouched.
            update_attributes = attributes.compact
            was_updated = update_attributes.any? { |key, value| existing_record[key] != value }
            existing_record.update!(update_attributes) if was_updated
            [existing_record, was_updated]
          end
        elsif @test_mode
          log_test_creation(attributes)
          nil
        else
          # For new records, use all attributes including nil values.
          [model_class.create!(attributes), false]
        end
      end

      # Files the record's granblue_id under @new_records or
      # @updated_records depending on the updated? flag.
      def track_record(result)
        record, was_updated = result
        type = model_class.name.demodulize.downcase

        if was_updated
          @updated_records[type] << record.granblue_id
          log_updated_record(record) if @verbose
        else
          @new_records[type] << record.granblue_id
          log_new_record(record) if @verbose
        end
      end

      # Test-mode logging: shows only the attributes that would be updated.
      def log_test_update(record, attributes)
        update_attributes = attributes.compact
        # send is used because log_operation may be private on the logger.
        @logger&.send(:log_operation, "Update #{model_class.name} #{record.granblue_id}: #{update_attributes.inspect}")
      end

      # Test-mode logging for a record that would be created.
      def log_test_creation(attributes)
        @logger&.send(:log_operation, "Create #{model_class.name}: #{attributes.inspect}")
      end

      # NOTE(review): the two methods below write to stdout rather than
      # @logger — presumably intentional for rake-task output; confirm
      # before unifying with the logger.
      def log_new_record(record)
        puts "Created #{model_class.name} with ID: #{record.granblue_id}"
      end

      def log_updated_record(record)
        puts "Updated #{model_class.name} with ID: #{record.granblue_id}"
      end

      # --- CSV value coercion helpers --------------------------------------
      # All return nil for nil/blank input so updates can skip the column.

      def parse_value(value)
        return nil if value.nil? || value.strip.empty?

        value
      end

      def parse_integer(value)
        return nil if value.nil? || value.strip.empty?

        value.to_i
      end

      def parse_float(value)
        return nil if value.nil? || value.strip.empty?

        value.to_f
      end

      # Only the literal string 'true' maps to true; anything else is false.
      def parse_boolean(value)
        return nil if value.nil? || value.strip.empty?

        value == 'true'
      end

      # Parses a date string, returning nil for blank or unparseable input.
      def parse_date(date_str)
        return nil if date_str.nil? || date_str.strip.empty?

        begin
          Date.parse(date_str)
        rescue ArgumentError, TypeError
          # The previous inline `rescue nil` swallowed every StandardError;
          # only rescue what Date.parse raises on bad input.
          nil
        end
      end

      # Parses a Postgres-style array literal like "{a,b,c}" into an Array
      # of strings. Blank input yields [].
      def parse_array(array_str)
        return [] if array_str.nil? || array_str.strip.empty?

        array_str.tr('{}', '').split(',')
      end

      def parse_integer_array(array_str)
        parse_array(array_str).map(&:to_i)
      end

      # @abstract Subclasses must return the model class to import into.
      def model_class
        raise NotImplementedError, 'Subclasses must define model_class'
      end

      # @abstract Subclasses must map a CSV::Row to an attributes Hash.
      def build_attributes(row)
        raise NotImplementedError, 'Subclasses must define build_attributes'
      end
    end
  end
end
|