@@ -1288,9 +1288,208 @@ def generate_product_excel_background():
 # -------------------------------------------------------------------------------------------------
 
+
+# THIS DOES NOT DELETE THE RECORD EVEN IF A RECORD IS ABSENT IN THE EXCEL FILE
+# class ProductUploadExcelView(APIView):
+#     """
+#     POST API to upload an Excel file.
+#     """
+#     parser_classes = (MultiPartParser, FormParser)
+
+#     def post(self, request, *args, **kwargs):
+#         file_obj = request.FILES.get('file')
+#         if not file_obj:
+#             return Response({'error': 'No file provided'}, status=status.HTTP_400_BAD_REQUEST)
+
+#         try:
+#             # ... (Upload and DB processing logic remains unchanged)
+
+#             # Read all sheets from Excel file
+#             excel_file = pd.ExcelFile(file_obj)
+
+#             # Check if required sheets exist
+#             if 'Products' not in excel_file.sheet_names:
+#                 logger.error(f"Upload failed: Missing 'Products' sheet in file.")
+#                 return Response({
+#                     'error': "Missing 'Products' sheet",
+#                     'available_sheets': excel_file.sheet_names
+#                 }, status=status.HTTP_400_BAD_REQUEST)
+
+#             df_products = pd.read_excel(excel_file, sheet_name='Products')
+#             df_products.columns = [c.strip().lower().replace(' ', '_') for c in df_products.columns]
+
+#             expected_product_cols = {
+#                 'item_id', 'product_name', 'product_long_description',
+#                 'product_short_description', 'product_type', 'image_path'
+#             }
+
+#             if not expected_product_cols.issubset(df_products.columns):
+#                 logger.error(f"Upload failed: Missing required columns in Products sheet.")
+#                 return Response({
+#                     'error': 'Missing required columns in Products sheet',
+#                     'required_columns': list(expected_product_cols),
+#                     'found_columns': list(df_products.columns)
+#                 }, status=status.HTTP_400_BAD_REQUEST)
+
+#             df_attributes = None
+#             has_attributes_sheet = 'Attribute_values' in excel_file.sheet_names
+
+#             if has_attributes_sheet:
+#                 df_attributes = pd.read_excel(excel_file, sheet_name='Attribute_values')
+#                 df_attributes.columns = [c.strip().lower().replace(' ', '_') for c in df_attributes.columns]
+
+#                 expected_attr_cols = {'item_id', 'attribute_name', 'original_value'}
+#                 if not expected_attr_cols.issubset(df_attributes.columns):
+#                     logger.error(f"Upload failed: Missing required columns in Attribute_values sheet.")
+#                     return Response({
+#                         'error': 'Missing required columns in Attribute_values sheet',
+#                         'required_columns': list(expected_attr_cols),
+#                         'found_columns': list(df_attributes.columns)
+#                     }, status=status.HTTP_400_BAD_REQUEST)
+
+#             products_created = 0
+#             products_updated = 0
+#             attributes_created = 0
+#             attributes_updated = 0
+#             products_failed = 0
+#             attributes_failed = 0
+#             errors = []
+
+#             with transaction.atomic():
+#                 for idx, row in df_products.iterrows():
+#                     item_id = str(row.get('item_id', '')).strip()
+#                     product_type = str(row.get('product_type', '')).strip()
+
+#                     if not item_id:
+#                         products_failed += 1
+#                         errors.append(f"Products Row {idx + 2}: Missing item_id")
+#                         continue
+
+#                     try:
+#                         if product_type:
+#                             ProductType.objects.get_or_create(name=product_type)
+
+#                         defaults = {
+#                             'product_name': str(row.get('product_name', '')),
+#                             'product_long_description': str(row.get('product_long_description', '')),
+#                             'product_short_description': str(row.get('product_short_description', '')),
+#                             'product_type': product_type,
+#                             'image_path': str(row.get('image_path', '')),
+#                         }
+
+#                         obj, created = Product.objects.update_or_create(item_id=item_id, defaults=defaults)
+
+#                         if created: products_created += 1
+#                         else: products_updated += 1
+#                     except Exception as e:
+#                         products_failed += 1
+#                         errors.append(f"Products Row {idx + 2} (item_id: {item_id}): {str(e)}")
+#                         logger.error(f"Error processing product {item_id} in Products sheet: {e}")
+
+#                 if has_attributes_sheet and df_attributes is not None:
+#                     item_ids_in_attrs = df_attributes['item_id'].astype(str).unique()
+#                     existing_products = {p.item_id: p for p in Product.objects.filter(item_id__in=item_ids_in_attrs)}
+
+#                     for idx, row in df_attributes.iterrows():
+#                         item_id = str(row.get('item_id', '')).strip()
+#                         attribute_name = str(row.get('attribute_name', '')).strip()
+#                         original_value = str(row.get('original_value', '')).strip()
+
+#                         if not item_id or not attribute_name:
+#                             attributes_failed += 1
+#                             errors.append(f"Attribute_values Row {idx + 2}: Missing item_id or attribute_name")
+#                             continue
+
+#                         product = existing_products.get(item_id)
+#                         if not product:
+#                             attributes_failed += 1
+#                             errors.append(f"Attribute_values Row {idx + 2}: Product with item_id '{item_id}' not found. Make sure it exists in Products sheet.")
+#                             continue
+
+#                         try:
+#                             attr_obj, created = ProductAttributeValue.objects.update_or_create(
+#                                 product=product,
+#                                 attribute_name=attribute_name,
+#                                 defaults={'original_value': original_value}
+#                             )
+#                             if created: attributes_created += 1
+#                             else: attributes_updated += 1
+#                         except Exception as e:
+#                             attributes_failed += 1
+#                             errors.append(f"Attribute_values Row {idx + 2} (item_id: {item_id}, attribute: {attribute_name}): {str(e)}")
+#                             logger.error(f"Error processing attribute {attribute_name} for product {item_id}: {e}")
+
+#             # Prepare response data
+#             response_data = {
+#                 'message': 'Upload completed',
+#                 'products': {
+#                     'created': products_created, 'updated': products_updated, 'failed': products_failed,
+#                     'total_processed': products_created + products_updated + products_failed
+#                 },
+#                 'attribute_values': {
+#                     'created': attributes_created, 'updated': attributes_updated, 'failed': attributes_failed,
+#                     'total_processed': attributes_created + attributes_updated + attributes_failed
+#                 } if has_attributes_sheet else {'message': 'Attribute_values sheet not found in Excel file'},
+#                 'generated_excel_status': 'Excel generation started in the background.'
+#             }
+
+#             if errors:
+#                 response_data['errors'] = errors[:50]
+#                 if len(errors) > 50:
+#                     response_data['errors'].append(f"... and {len(errors) - 50} more errors")
+
+#             upload_status = status.HTTP_201_CREATED if products_failed == 0 and attributes_failed == 0 else status.HTTP_207_MULTI_STATUS
+
+#             # Start background thread for Excel generation if upload was successful
+#             if products_failed == 0 and attributes_failed == 0:
+#                 logger.info("API call successful. Triggering background Excel generation thread is commented for now !!!!.")
+#                 # threading.Thread(target=generate_product_excel_background, daemon=True).start()
+
+#                 ## FIX: Update monitoring URLs to point to the new generated_outputs subfolder
+#                 # response_data['generated_excel_status'] = 'Background Excel generation triggered successfully.'
+#                 # response_data['monitoring'] = {
+#                 #     'excel_file': os.path.join(OUTPUT_URL, EXCEL_FILE_NAME),
+#                 #     'status_file': os.path.join(OUTPUT_URL, STATUS_FILE_NAME),
+#                 #     'log_file': os.path.join(OUTPUT_URL, LOG_FILE_NAME),
+#                 #     'note': 'These files will be available once the background process completes.'
+#                 # }
+#             else:
+#                 logger.warning(f"API call finished with errors ({products_failed} products, {attributes_failed} attributes). Not triggering background excel generation.")
+#                 response_data['generated_excel_status'] = 'Background Excel generation was NOT triggered due to upload errors. Fix upload errors and re-upload.'
+
+#             return Response(response_data, status=upload_status)
+
+#         except pd.errors.EmptyDataError:
+#             logger.error('The uploaded Excel file is empty or invalid.')
+#             return Response({'error': 'The uploaded Excel file is empty or invalid'}, status=status.HTTP_400_BAD_REQUEST)
+#         except Exception as e:
+#             logger.exception(f'An unexpected error occurred while processing the file.')
+#             return Response({'error': f'An unexpected error occurred while processing the file: {str(e)}'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
+
+# THIS MAKES THE DB IN SYNC WITH THE EXCEL. IF A PRODUCT IS NOT PRESENT IT GETS DELETED.
+
+from rest_framework.views import APIView
+from rest_framework.response import Response
+from rest_framework import status
+from rest_framework.parsers import MultiPartParser, FormParser
+from django.db import transaction
+import pandas as pd
+import logging
+import os
+# import threading  # Uncomment if you use background excel generation
+from .models import Product, ProductType, ProductAttributeValue
+
+logger = logging.getLogger(__name__)
+
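+# A minimal usage sketch. The endpoint path and the workbook layout below are
+# assumptions; match them to your urls.py and to the downloadable template:
+#
+#   import pandas as pd
+#   with pd.ExcelWriter('products.xlsx') as xl:
+#       pd.DataFrame([{
+#           'Item ID': 'SKU-1', 'Product Name': 'Mug',
+#           'Product Long Description': '...', 'Product Short Description': '...',
+#           'Product Type': 'Drinkware', 'Image Path': 'img/mug.png',
+#       }]).to_excel(xl, sheet_name='Products', index=False)
+#       pd.DataFrame([{
+#           'Item ID': 'SKU-1', 'Attribute Name': 'color', 'Original Value': 'blue',
+#       }]).to_excel(xl, sheet_name='Attribute_values', index=False)
+#
+#   curl -X POST -F "file=@products.xlsx" http://localhost:8000/api/products/upload-excel/
+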
 class ProductUploadExcelView(APIView):
     """
-    POST API to upload an Excel file.
+    POST API to upload an Excel file and synchronize Products & Attributes with the DB.
+    If a product is missing from the Excel file, it is deleted from the database.
     """
     parser_classes = (MultiPartParser, FormParser)
 
@@ -1300,136 +1499,169 @@ class ProductUploadExcelView(APIView):
             return Response({'error': 'No file provided'}, status=status.HTTP_400_BAD_REQUEST)
 
         try:
-            # ... (Upload and DB processing logic remains unchanged)
-
-            # Read all sheets from Excel file
+            # Read all sheets
             excel_file = pd.ExcelFile(file_obj)
-
-            # Check if required sheets exist
+
             if 'Products' not in excel_file.sheet_names:
-                logger.error(f"Upload failed: Missing 'Products' sheet in file.")
-                return Response({
-                    'error': "Missing 'Products' sheet",
-                    'available_sheets': excel_file.sheet_names
-                }, status=status.HTTP_400_BAD_REQUEST)
-
+                logger.error("Missing 'Products' sheet in uploaded file.")
+                return Response({
+                    'error': "Missing 'Products' sheet",
+                    'available_sheets': excel_file.sheet_names
+                }, status=status.HTTP_400_BAD_REQUEST)
+
             df_products = pd.read_excel(excel_file, sheet_name='Products')
             df_products.columns = [c.strip().lower().replace(' ', '_') for c in df_products.columns]
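+            # e.g. a header like "Product Name " normalizes to "product_name"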
 
             expected_product_cols = {
-                'item_id', 'product_name', 'product_long_description',
-                'product_short_description', 'product_type', 'image_path'
+                'item_id', 'product_name', 'product_long_description',
+                'product_short_description', 'product_type', 'image_path'
             }
 
             if not expected_product_cols.issubset(df_products.columns):
-                logger.error(f"Upload failed: Missing required columns in Products sheet.")
-                return Response({
-                    'error': 'Missing required columns in Products sheet',
-                    'required_columns': list(expected_product_cols),
-                    'found_columns': list(df_products.columns)
-                }, status=status.HTTP_400_BAD_REQUEST)
+                logger.error("Missing required columns in Products sheet.")
+                return Response({
+                    'error': 'Missing required columns in Products sheet',
+                    'required_columns': list(expected_product_cols),
+                    'found_columns': list(df_products.columns)
+                }, status=status.HTTP_400_BAD_REQUEST)
 
-            df_attributes = None
+            # Optional attributes sheet
             has_attributes_sheet = 'Attribute_values' in excel_file.sheet_names
-
+            df_attributes = None
             if has_attributes_sheet:
-                df_attributes = pd.read_excel(excel_file, sheet_name='Attribute_values')
-                df_attributes.columns = [c.strip().lower().replace(' ', '_') for c in df_attributes.columns]
-
-                expected_attr_cols = {'item_id', 'attribute_name', 'original_value'}
-                if not expected_attr_cols.issubset(df_attributes.columns):
-                    logger.error(f"Upload failed: Missing required columns in Attribute_values sheet.")
-                    return Response({
-                        'error': 'Missing required columns in Attribute_values sheet',
-                        'required_columns': list(expected_attr_cols),
-                        'found_columns': list(df_attributes.columns)
-                    }, status=status.HTTP_400_BAD_REQUEST)
+                df_attributes = pd.read_excel(excel_file, sheet_name='Attribute_values')
+                df_attributes.columns = [c.strip().lower().replace(' ', '_') for c in df_attributes.columns]
+
+                expected_attr_cols = {'item_id', 'attribute_name', 'original_value'}
+                if not expected_attr_cols.issubset(df_attributes.columns):
+                    logger.error("Missing required columns in Attribute_values sheet.")
+                    return Response({
+                        'error': 'Missing required columns in Attribute_values sheet',
+                        'required_columns': list(expected_attr_cols),
+                        'found_columns': list(df_attributes.columns)
+                    }, status=status.HTTP_400_BAD_REQUEST)
 
             products_created = 0
             products_updated = 0
+            products_deleted = 0
             attributes_created = 0
             attributes_updated = 0
+            attributes_deleted = 0
             products_failed = 0
             attributes_failed = 0
             errors = []
 
             with transaction.atomic():
-                for idx, row in df_products.iterrows():
-                    item_id = str(row.get('item_id', '')).strip()
-                    product_type = str(row.get('product_type', '')).strip()
-
-                    if not item_id:
-                        products_failed += 1
-                        errors.append(f"Products Row {idx + 2}: Missing item_id")
-                        continue
-
-                    try:
-                        if product_type:
-                            ProductType.objects.get_or_create(name=product_type)
-
-                        defaults = {
-                            'product_name': str(row.get('product_name', '')),
-                            'product_long_description': str(row.get('product_long_description', '')),
-                            'product_short_description': str(row.get('product_short_description', '')),
-                            'product_type': product_type,
-                            'image_path': str(row.get('image_path', '')),
-                        }
-
-                        obj, created = Product.objects.update_or_create(item_id=item_id, defaults=defaults)
-
-                        if created: products_created += 1
-                        else: products_updated += 1
-                    except Exception as e:
-                        products_failed += 1
-                        errors.append(f"Products Row {idx + 2} (item_id: {item_id}): {str(e)}")
-                        logger.error(f"Error processing product {item_id} in Products sheet: {e}")
-
-
-                if has_attributes_sheet and df_attributes is not None:
-                    item_ids_in_attrs = df_attributes['item_id'].astype(str).unique()
-                    existing_products = {p.item_id: p for p in Product.objects.filter(item_id__in=item_ids_in_attrs)}
-
-                    for idx, row in df_attributes.iterrows():
-                        item_id = str(row.get('item_id', '')).strip()
-                        attribute_name = str(row.get('attribute_name', '')).strip()
-                        original_value = str(row.get('original_value', '')).strip()
-
-                        if not item_id or not attribute_name:
-                            attributes_failed += 1
-                            errors.append(f"Attribute_values Row {idx + 2}: Missing item_id or attribute_name")
-                            continue
-
-                        product = existing_products.get(item_id)
-                        if not product:
-                            attributes_failed += 1
-                            errors.append(f"Attribute_values Row {idx + 2}: Product with item_id '{item_id}' not found. Make sure it exists in Products sheet.")
-                            continue
-
-                        try:
-                            attr_obj, created = ProductAttributeValue.objects.update_or_create(
-                                product=product,
-                                attribute_name=attribute_name,
-                                defaults={'original_value': original_value}
-                            )
-                            if created: attributes_created += 1
-                            else: attributes_updated += 1
-                        except Exception as e:
-                            attributes_failed += 1
-                            errors.append(f"Attribute_values Row {idx + 2} (item_id: {item_id}, attribute: {attribute_name}): {str(e)}")
-                            logger.error(f"Error processing attribute {attribute_name} for product {item_id}: {e}")
-
-            # Prepare response data
+                # -------------------------------
+                # 🔥 TRUE SYNC: Delete missing products
+                # -------------------------------
+                existing_item_ids = set(Product.objects.values_list('item_id', flat=True))
+                # Strip IDs the same way the row loop below does, so a padded
+                # cell does not cause a delete-and-recreate of the same product.
+                uploaded_item_ids = {str(v).strip() for v in df_products['item_id']}
+                to_delete = existing_item_ids - uploaded_item_ids
+
+                if to_delete:
+                    deleted_count, _ = Product.objects.filter(item_id__in=to_delete).delete()
+                    products_deleted += deleted_count
+                    logger.info(f"Deleted {deleted_count} products missing from the Excel file.")
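+                    # NOTE: delete() returns the total number of rows removed,
+                    # including anything removed by FK cascade, so this count can
+                    # exceed the number of Product rows if related models cascade
+                    # (an assumption about your on_delete settings).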
+
+                # -------------------------------
+                # ✅ Create or update products
+                # -------------------------------
+                for idx, row in df_products.iterrows():
+                    item_id = str(row.get('item_id', '')).strip()
+                    product_type = str(row.get('product_type', '')).strip()
+
+                    if not item_id:
+                        products_failed += 1
+                        errors.append(f"Products Row {idx + 2}: Missing item_id")
+                        continue
+
+                    try:
+                        if product_type:
+                            ProductType.objects.get_or_create(name=product_type)
+
+                        defaults = {
+                            'product_name': str(row.get('product_name', '')),
+                            'product_long_description': str(row.get('product_long_description', '')),
+                            'product_short_description': str(row.get('product_short_description', '')),
+                            'product_type': product_type,
+                            'image_path': str(row.get('image_path', '')),
+                        }
+
+                        obj, created = Product.objects.update_or_create(item_id=item_id, defaults=defaults)
+                        if created:
+                            products_created += 1
+                        else:
+                            products_updated += 1
+                    except Exception as e:
+                        products_failed += 1
+                        errors.append(f"Products Row {idx + 2} (item_id: {item_id}): {str(e)}")
+                        logger.error(f"Error processing product {item_id}: {e}")
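+                # update_or_create keys on item_id, so re-uploading the same
+                # file is idempotent: unchanged rows simply count as 'updated'.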
+
+                # -------------------------------
+                # ✅ Handle attributes (optional)
+                # -------------------------------
+                if has_attributes_sheet and df_attributes is not None:
+                    item_ids_in_attrs = df_attributes['item_id'].astype(str).unique()
+                    existing_products = {p.item_id: p for p in Product.objects.filter(item_id__in=item_ids_in_attrs)}
+
+                    # 🔥 TRUE SYNC for attributes: delete attributes linked to deleted products
+                    if to_delete:
+                        deleted_attr_count, _ = ProductAttributeValue.objects.filter(product__item_id__in=to_delete).delete()
+                        attributes_deleted += deleted_attr_count
+                        logger.info(f"Deleted {deleted_attr_count} attributes linked to removed products.")
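+                        # If ProductAttributeValue.product uses on_delete=CASCADE
+                        # (an assumption), these rows were already removed together
+                        # with the products above and this queryset deletes nothing.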
+
+                    for idx, row in df_attributes.iterrows():
+                        item_id = str(row.get('item_id', '')).strip()
+                        attribute_name = str(row.get('attribute_name', '')).strip()
+                        original_value = str(row.get('original_value', '')).strip()
+
+                        if not item_id or not attribute_name:
+                            attributes_failed += 1
+                            errors.append(f"Attribute_values Row {idx + 2}: Missing item_id or attribute_name")
+                            continue
+
+                        product = existing_products.get(item_id)
+                        if not product:
+                            attributes_failed += 1
+                            errors.append(f"Attribute_values Row {idx + 2}: Product '{item_id}' not found.")
+                            continue
+
+                        try:
+                            attr_obj, created = ProductAttributeValue.objects.update_or_create(
+                                product=product,
+                                attribute_name=attribute_name,
+                                defaults={'original_value': original_value}
+                            )
+                            if created:
+                                attributes_created += 1
+                            else:
+                                attributes_updated += 1
+                        except Exception as e:
+                            attributes_failed += 1
+                            errors.append(f"Attribute_values Row {idx + 2} (item_id: {item_id}, attribute: {attribute_name}): {str(e)}")
+                            logger.error(f"Error processing attribute {attribute_name} for {item_id}: {e}")
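+                    # Note: update_or_create keys on (product, attribute_name),
+                    # so each attribute exists at most once per product.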
+
+                # -------------------------------
+                # ✅ Prepare response
+                # -------------------------------
             response_data = {
-                'message': 'Upload completed',
+                'message': 'Upload and synchronization completed',
                 'products': {
-                    'created': products_created, 'updated': products_updated, 'failed': products_failed,
-                    'total_processed': products_created + products_updated + products_failed
+                    'created': products_created,
+                    'updated': products_updated,
+                    'deleted': products_deleted,
+                    'failed': products_failed,
+                    'total_processed': products_created + products_updated + products_deleted + products_failed
                 },
                 'attribute_values': {
-                    'created': attributes_created, 'updated': attributes_updated, 'failed': attributes_failed,
-                    'total_processed': attributes_created + attributes_updated + attributes_failed
+                    'created': attributes_created,
+                    'updated': attributes_updated,
+                    'deleted': attributes_deleted,
+                    'failed': attributes_failed,
+                    'total_processed': attributes_created + attributes_updated + attributes_deleted + attributes_failed
                 } if has_attributes_sheet else {'message': 'Attribute_values sheet not found in Excel file'},
-                'generated_excel_status': 'Excel generation started in the background.'
+                'generated_excel_status': 'Excel generation skipped (true sync mode).'
             }
 
             if errors:
@@ -1437,34 +1669,16 @@ class ProductUploadExcelView(APIView):
                 response_data['errors'] = errors[:50]
                 if len(errors) > 50:
                     response_data['errors'].append(f"... and {len(errors) - 50} more errors")
 
-            upload_status = status.HTTP_201_CREATED if products_failed == 0 and attributes_failed == 0 else status.HTTP_207_MULTI_STATUS
-
-            # Start background thread for Excel generation if upload was successful
-            if products_failed == 0 and attributes_failed == 0:
-                logger.info("API call successful. Triggering background Excel generation thread is commented for now !!!!.")
-                # threading.Thread(target=generate_product_excel_background, daemon=True).start()
-
-                ## FIX: Update monitoring URLs to point to the new generated_outputs subfolder
-                # response_data['generated_excel_status'] = 'Background Excel generation triggered successfully.'
-                # response_data['monitoring'] = {
-                #     'excel_file': os.path.join(OUTPUT_URL, EXCEL_FILE_NAME),
-                #     'status_file': os.path.join(OUTPUT_URL, STATUS_FILE_NAME),
-                #     'log_file': os.path.join(OUTPUT_URL, LOG_FILE_NAME),
-                #     'note': 'These files will be available once the background process completes.'
-                # }
-            else:
-                logger.warning(f"API call finished with errors ({products_failed} products, {attributes_failed} attributes). Not triggering background excel generation.")
-                response_data['generated_excel_status'] = 'Background Excel generation was NOT triggered due to upload errors. Fix upload errors and re-upload.'
-
-
+            upload_status = status.HTTP_201_CREATED if (products_failed == 0 and attributes_failed == 0) else status.HTTP_207_MULTI_STATUS
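+            # 207 Multi-Status signals partial success; clients should inspect
+            # response_data['errors'] for the rows that failed.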
             return Response(response_data, status=upload_status)
 
         except pd.errors.EmptyDataError:
             logger.error('The uploaded Excel file is empty or invalid.')
             return Response({'error': 'The uploaded Excel file is empty or invalid'}, status=status.HTTP_400_BAD_REQUEST)
         except Exception as e:
-            logger.exception(f'An unexpected error occurred while processing the file.')
-            return Response({'error': f'An unexpected error occurred while processing the file: {str(e)}'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+            logger.exception('Unexpected error while processing the Excel file.')
+            return Response({'error': f'Unexpected error: {str(e)}'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
+
 
 class DownloadExcelTemplateView(APIView):
@@ -1871,7 +2085,6 @@ class ProductAttributesUploadView(APIView):
             return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
 
 
-
 class ProductTypeAttributesView(APIView):
     """
     API to view, create, update, and delete product type attributes and their possible values.