# app/services/ImportService.coffee
{ Product, Category } = require 'app/types/data'
ProductService = require 'app/services/ProductService'
CategoryService = require 'app/services/CategoryService'

# Imports CSV data as Product documents for a given domain:
# maps CSV columns onto product fields, validates rows, makes sure the
# referenced categories exist, and persists everything in fixed-size batches.
class ImportService
  constructor: ->
    @batchSize = 50   # products persisted per bulk-save call
    @maxImages = 5    # cap on additional images taken per product

  # Entry point.
  # csvData      - array of row objects (CSV header -> cell value)
  # fieldMapping - { csvColumnName: productFieldName }
  # domain       - domain tag stored on every created product/category
  # onProgress   - optional callback({ processed, total, percentage, results })
  # Returns { success: [...], errors: [...] }; rethrows fatal errors.
  importCSVData: (csvData, fieldMapping, domain, onProgress) ->
    log '🚀 Начало импорта CSV данных'
    try
      # Transform every row into a Product instance
      products = csvData.map (row, index) =>
        @transformRowToProduct(row, fieldMapping, domain, index)

      # Keep only rows that produced a usable product
      validProducts = products.filter (product) =>
        product.name and product.sku and product.price > 0

      log "✅ Валидных товаров: " + validProducts.length + " из " + products.length

      return await @processProductsInBatches(validProducts, domain, onProgress)
    catch error
      log '❌ Ошибка импорта CSV данных:', error
      throw error

  # Builds one Product from a CSV row using the column -> field mapping.
  transformRowToProduct: (row, fieldMapping, domain, index) ->
    product = new Product()

    # Base fields.
    # BUGFIX: getFieldByMapping already returns the cell VALUE; the original
    # code used that value as a key into `row` again, so real SKUs were lost
    # and every product fell back to "temp_<index>".
    skuValue = @getFieldByMapping(row, fieldMapping, 'sku') or "temp_" + index
    product._id = "product:" + skuValue
    product.name = @getFieldByMapping(row, fieldMapping, 'name') || ''
    product.sku = skuValue
    product.price = @parsePrice(@getFieldByMapping(row, fieldMapping, 'price'))
    product.oldPrice = @parsePrice(@getFieldByMapping(row, fieldMapping, 'oldPrice'))
    product.brand = @getFieldByMapping(row, fieldMapping, 'brand') || ''
    product.category = @getFieldByMapping(row, fieldMapping, 'category') || ''
    product.description = @getFieldByMapping(row, fieldMapping, 'description') || ''
    product.domains = [domain]

    # Images.
    # BUGFIX: fieldMapping is now passed explicitly — @fieldMapping was never
    # assigned anywhere, so image extraction always returned an empty list.
    product.images = @processProductImages(row, product._id, fieldMapping)

    # Rich content (optional JSON blob converted to Markdown)
    richContentField = @getFieldByMapping(row, fieldMapping, 'richContent')
    if richContentField
      try
        product.richContent = @jsonToMarkdown(JSON.parse(richContentField))
      catch error
        log '⚠️ Ошибка парсинга rich-контента:', error

    # Every CSV column not covered by the mapping becomes an attribute
    product.attributes = @extractProductAttributes(row, fieldMapping)

    return product

  # Returns the cell value of the first CSV column mapped to targetField,
  # or '' when no mapped column has a truthy value.
  getFieldByMapping: (row, fieldMapping, targetField) ->
    for csvField, mappedField of fieldMapping
      if mappedField == targetField and row[csvField]
        return row[csvField]
    return ''

  # Parses "1 234,56"-style price strings into a float; null when absent
  # or unparseable. Only the first comma is converted to a decimal point.
  parsePrice: (priceStr) ->
    return null unless priceStr
    price = parseFloat(priceStr.toString().replace(',', '.').replace(/\s/g, ''))
    return if isNaN(price) then null else price

  # Collects the main image plus up to @maxImages newline-separated
  # additional image URLs from the row.
  # fieldMapping defaults to @fieldMapping so the old (row, docId) call
  # shape still compiles, but callers should pass it explicitly.
  processProductImages: (row, docId, fieldMapping = @fieldMapping) ->
    images = []

    # Main image
    mainImage = @getFieldByMapping(row, fieldMapping, 'mainImage')
    if mainImage
      images.push {
        url: mainImage
        type: 'main'
        order: 0
        filename: "main-" + Date.now() + ".jpg"
      }

    # Additional images (one URL per line, capped at @maxImages)
    additionalImages = @getFieldByMapping(row, fieldMapping, 'additionalImages')
    if additionalImages
      imageUrls = additionalImages.split('\n').slice(0, @maxImages)
      imageUrls.forEach (imgUrl, index) ->
        if imgUrl.trim()
          images.push {
            url: imgUrl.trim()
            type: 'additional'
            order: index + 1
            filename: "additional-" + index + "-" + Date.now() + ".jpg"
          }

    return images

  # Any CSV column absent from the mapping becomes a trimmed string attribute.
  extractProductAttributes: (row, fieldMapping) ->
    attributes = {}
    for field, value of row
      if value and not fieldMapping[field]
        attributes[field] = value.toString().trim()
    return attributes

  # Saves products in @batchSize chunks, creating missing categories first.
  # Batches run sequentially; a failed batch is recorded in results.errors
  # (one entry per product) instead of aborting the whole import.
  processProductsInBatches: (products, domain, onProgress) ->
    batches = []
    for i in [0...products.length] by @batchSize
      batches.push(products.slice(i, i + @batchSize))

    processed = 0
    results = {
      success: []
      errors: []
    }

    processBatch = (batch) =>
      try
        # Create categories referenced by this batch if they don't exist yet
        await @ensureCategoriesExist(batch, domain)

        # Persist the batch
        batchResults = await ProductService.bulkSaveProducts(batch)
        results.success = results.success.concat(batchResults.success)
        results.errors = results.errors.concat(batchResults.errors)

        processed += batch.length

        # Progress reporting
        if onProgress
          onProgress({
            processed: processed
            total: products.length
            percentage: Math.round((processed / products.length) * 100)
            results: results
          })

        return batchResults
      catch error
        log '❌ Ошибка обработки пакета:', error
        # Attribute the failure to every product in the batch
        batchErrors = batch.map (product, index) =>
          { product: product, error: error.message, index: index }
        results.errors = results.errors.concat(batchErrors)
        return { success: [], errors: batchErrors }

    # Sequential batch processing (keeps backend load predictable).
    # Note: processBatch takes only the batch; the original passed a stray
    # second `index` argument that was silently ignored.
    for batch in batches
      await processBatch(batch)

    log "✅ Импорт завершен: Успешно " + results.success.length + ", Ошибок " + results.errors.length
    return results

  # Ensures a Category document exists for every distinct product.category
  # in the batch. Per-category failures are logged and do not abort the import.
  ensureCategoriesExist: (products, domain) ->
    categories = []
    products.forEach (product) ->
      if product.category and categories.indexOf(product.category) == -1
        categories.push(product.category)

    for categoryName in categories
      try
        slug = @slugify(categoryName)
        existingCategory = await CategoryService.getCategoryBySlug(slug)

        if not existingCategory
          category = new Category()
          category._id = "category:" + slug
          category.name = categoryName
          category.slug = slug
          category.domains = [domain]
          category.type = 'category'
          category.active = true
          category.order = 0

          await CategoryService.saveCategory(category)
          log "✅ Создана категория: " + categoryName
      catch error
        log "❌ Ошибка создания категории " + categoryName + ":", error

  # ASCII slug: lowercase, whitespace -> '-', strip non-word characters,
  # collapse repeated '-' and trim leading/trailing '-'.
  slugify: (text) ->
    return '' unless text
    text.toString().toLowerCase()
      .replace(/\s+/g, '-')
      .replace(/[^\w\-]+/g, '')
      .replace(/\-\-+/g, '-')
      .replace(/^-+/, '')
      .replace(/-+$/, '')

  # Flattens 'raTextBlock' widgets of a rich-content JSON tree into Markdown:
  # text items become paragraphs, 'br' items become single newlines.
  jsonToMarkdown: (richContent) ->
    markdown = ''
    if richContent and richContent.content
      richContent.content.forEach (block) ->
        if block.widgetName == 'raTextBlock' and block.text and block.text.items
          block.text.items.forEach (item) ->
            if item.type == 'text' and item.content
              markdown += item.content + '\n\n'
            else if item.type == 'br'
              markdown += '\n'
    return markdown.trim()

module.exports = new ImportService()