I want to import a large amount of CSV data (not straight into AR, but after doing a few lookups per row first), and my code is very slow.
def csv_import
  require 'csv'
  file = File.open("/#{Rails.public_path}/uploads/shate.csv")
  csv = CSV.open(file, "r:ISO-8859-15:UTF-8", {:col_sep => ";", :row_sep => :auto, :headers => :first_row})

  csv.each do |row|
    #ename,esupp = row[1].split(/_/)
    #(ename,esupp,foo) = row[1].split('_')
    abrakadabra = row[0].to_s
    (ename, esupp) = abrakadabra.split(/_/)
    eprice = row[6]
    eqnt = row[1]
    # logger.info("1) ")
    # logger.info(ename)
    # logger.info("---")
    # logger.info(esupp)
    #----
    #ename = row[4]
    #eprice = row[7]
    #eqnt = row[10]
    #esupp = row[12]

    if ename.present? && ename.size > 3
      search_condition = "*" + ename.upcase + "*"

      if esupp.present?
        # one LIKE query against the suppliers table for every row
        #supplier = @suppliers.find{|item| item['SUP_BRAND'] =~ Regexp.new(".*#{esupp}.*") }
        supplier = Supplier.where("SUP_BRAND like ?", "%#{esupp}%").first
        logger.warn("!!! *** supp !!!")
        #logger.warn(supplier)
      end

      if supplier.present?
        # MySQL full-text lookup on the normalized search number, then filter in Ruby
        @search = ArtLookup.find(:all, :conditions => ['MATCH (ARL_SEARCH_NUMBER) AGAINST(? IN BOOLEAN MODE)', search_condition.gsub(/[^0-9A-Za-z]/, '')])
        @articles = Article.find(:all, :conditions => { :ART_ID => @search.map(&:ARL_ART_ID) })
        @art_concret = @articles.find_all { |item| item.ART_ARTICLE_NR.gsub(/[^0-9A-Za-z]/, '').include?(ename.gsub(/[^0-9A-Za-z]/, '')) }
        @aa = @art_concret.find { |item| item['ART_SUP_ID'] == supplier.SUP_ID } #| @articles

        if @aa.present?
          @art = Article.find_by_ART_ID(@aa)
        end

        if @art.present?
          @art.PRICEM = eprice
          @art.QUANTITYM = eqnt
          @art.datetime_of_update = DateTime.now
          @art.save
        end
      end
      logger.warn("------------------------------")
    end
    #logger.warn(esupp)
  end
end
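Based on the commented-out @suppliers line above, I was also thinking of loading all suppliers once before the loop instead of running a LIKE query for every row. Roughly this kind of sketch (untested; the suppliers variable and the simple include? match are my own assumptions, SUP_BRAND / SUP_ID are the columns already used above):

# Sketch only (untested): load suppliers once, then match in memory per row.
suppliers = Supplier.all

csv.each do |row|
  (ename, esupp) = row[0].to_s.split(/_/)
  next unless ename.present? && ename.size > 3

  # replaces the per-row Supplier.where("SUP_BRAND like ?", ...) query
  supplier = suppliers.find { |s| s.SUP_BRAND.to_s.include?(esupp.to_s) }
  # ... rest of the row handling stays the same ...
end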
Even with everything stripped out, just reading the rows like this is still slow.
def csv_import
  require 'csv'
  file = File.open("/#{Rails.public_path}/uploads/shate.csv")
  csv = CSV.open(file, "r:ISO-8859-15:UTF-8", {:col_sep => ";", :row_sep => :auto, :headers => :first_row})

  csv.each do |row|
  end
end
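To check how much of the time is the parsing itself versus the database work, I was going to wrap the bare loop in Benchmark.realtime, roughly like this (a sketch only, reusing the same open call as above):

require 'csv'
require 'benchmark'

# Sketch: time just the parse loop, with the same file, encoding and separator
# as above, to see whether CSV parsing or the per-row queries dominate.
file = File.open("/#{Rails.public_path}/uploads/shate.csv")
seconds = Benchmark.realtime do
  csv = CSV.open(file, "r:ISO-8859-15:UTF-8", {:col_sep => ";", :row_sep => :auto, :headers => :first_row})
  csv.each do |row|
    # no per-row work here, just iterate
  end
end
logger.warn("parse only: #{seconds} seconds")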
Could anyone help me speed this up, for example by using fastcsv?
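Something along these lines is what I had in mind, though I am not sure of the exact interface of fastcsv; this sketch uses a FasterCSV-style foreach (the fastercsv gem) and may well need adjusting:

require 'fastercsv'  # assumption: FasterCSV-style API; fastcsv itself may differ

# Sketch (untested): read the file row by row with the same separator and
# headers handling as the current code, then do the per-row updates.
FasterCSV.foreach("/#{Rails.public_path}/uploads/shate.csv",
                  :col_sep => ";", :headers => :first_row) do |row|
  # ... same per-row handling as in csv_import above ...
end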