module CSVImportable
Constants
- MAX_CSV_ROWS
Attributes
Public Class Methods
Source
# File app/models/concerns/csv_importable.rb, line 233 def self.model_name ActiveModel::Name.new( self, nil, [@import_type.camelize, @record_type.singularize.camelize].sort.join ) end
Public Instance Methods
Source
# File app/models/concerns/csv_importable.rb, line 212 def count_columns %i[ new_record_count changed_record_count exact_duplicate_record_count ].freeze end
Source
# File app/models/concerns/csv_importable.rb, line 120 def csv csv_data end
Needed so that validations match the form field name.
Source
# File app/models/concerns/csv_importable.rb, line 100 def csv=(source) if persisted? raise "Cannot change the CSV file for an existing import. " \ "Create a new import instead." end if source.is_a?(ActionDispatch::Http::UploadedFile) # CSV files exported from Excel may have a BOM. # https://en.wikipedia.org/wiki/Byte_order_mark # e.g. if you create a new class import from scratch in Excel on Mac v16, # save the file as CSV, and upload it. self.csv_data = source.to_io.tap(&:set_encoding_by_bom).read self.csv_filename = source&.original_filename self.rows_count = csv_data_object&.count else raise ArgumentError, "Expected an uploaded file, got #{source}" end end
Assign an uploaded CSV file to this import.
Reads the uploaded file into {Import::CSVData}, stores the original filename, and updates {#rows_count} based on the parsed CSV data.
If the file contains a UTF byte-order mark (BOM) (common when exporting from Excel), the encoding is detected and handled before reading.
Raises an error if called on a persisted record, as changing the CSV file for an existing import is not allowed.
@param source [ActionDispatch::Http::UploadedFile] the uploaded CSV file

@raise [RuntimeError] if called on a persisted record

@raise [ArgumentError] if source is not an uploaded file
Source
# File app/models/concerns/csv_importable.rb, line 124 def csv_data_object @csv_data_object ||= Import::CSVData.new(csv_data) end
Source
# File app/models/concerns/csv_importable.rb, line 175 def csv_has_records return unless csv_data csv_has_no_records = csv_data_object.empty? || (csv_data_object.count == 1 && csv_data_object.has_instruction_row?) errors.add(:csv, :empty) if csv_has_no_records end
Source
# File app/models/concerns/csv_importable.rb, line 167 def csv_is_not_too_large return unless csv_data if rows_count > MAX_CSV_ROWS errors.add(:csv, :too_many_rows, count: MAX_CSV_ROWS) end end
Source
# File app/models/concerns/csv_importable.rb, line 163 def csv_is_valid errors.add(:csv, :invalid) unless csv_data_object.well_formed? end
Source
# File app/models/concerns/csv_importable.rb, line 128 def csv_removed? csv_removed_at != nil end
Source
# File app/models/concerns/csv_importable.rb, line 220 def ensure_processed_with_count_statistics if processed? && count_columns.any? { |column| send(column).nil? } raise "Count statistics must be set for a processed import." end end
Source
# File app/models/concerns/csv_importable.rb, line 226 def join_table_class(import_type, record_type) Class.new(ApplicationRecord) do @import_type = import_type.to_s.pluralize @record_type = record_type.to_s self.table_name = [@import_type, @record_type.pluralize].sort.join("_") def self.model_name ActiveModel::Name.new( self, nil, [@import_type.camelize, @record_type.singularize.camelize].sort.join ) end end end
Source
# File app/models/concerns/csv_importable.rb, line 243 def link_records_by_type(type, records) import_type = self.class.name.underscore type = type.to_s join_table_class(import_type, type).import( ["#{type.singularize}_id", "#{import_type}_id"], records.map(&:id).product([id]).uniq, on_duplicate_key_ignore: true ) end
Source
# File app/models/concerns/csv_importable.rb, line 153 def load_serialized_errors!(limit: nil) return if serialized_errors.blank? serialized_errors .then { limit ? it.first(limit) : it } .each do |attribute, messages| messages.each { errors.add(attribute, _1) } end end
Source
# File app/models/concerns/csv_importable.rb, line 132 def parse_rows! return if invalid? self.rows = csv_data_object.records.map { |row_data| parse_row(row_data) } if invalid? self.serialized_errors = errors.to_hash self.status = :rows_are_invalid save!(validate: false) end end
Source
# File app/models/concerns/csv_importable.rb, line 144 def processed? processed_at != nil end
Source
# File app/models/concerns/csv_importable.rb, line 148 def remove! return if csv_removed? update!(csv_data: nil, csv_removed_at: Time.zone.now) end
Source
# File app/models/concerns/csv_importable.rb, line 184 def rows_are_valid return unless rows rows.each(&:validate) check_rows_are_unique row_offset = csv_data_object.has_instruction_row? ? 3 : 2 rows.each.with_index do |row, index| next if row.errors.empty? # The first row is the header and the index is 0-based, so we add two # to match what the user sees in the spreadsheet formatted_errors = row.errors.map do |error| if error.attribute == :base error.message else "<code>#{error.attribute}</code>: #{error.message}" end end errors.add("row_#{index + row_offset}".to_sym, formatted_errors) end end