views.py 79 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
717781779178017811782178317841785178617871788178917901791179217931794179517961797179817991800180118021803180418051806180718081809181018111812181318141815181618171818181918201821182218231824182518261827182818291830183118321833183418351836183718381839184018411842184318441845184618471848184918501851185218531854185518561857185818591860186118621863186418651866186718681869187018711872187318741875
  1. # ==================== Cleaned & Optimized Imports ====================
  2. import os
  3. import io
  4. import json
  5. import logging
  6. import threading
  7. from datetime import datetime
  8. import pandas as pd
  9. import concurrent.futures
  10. from django.conf import settings
  11. from django.http import HttpResponse
  12. from django.db import transaction
  13. from django.db.models import Prefetch
  14. from rest_framework.views import APIView
  15. from rest_framework.response import Response
  16. from rest_framework import status
  17. from rest_framework.parsers import MultiPartParser, FormParser
  18. from openpyxl import Workbook
  19. from openpyxl.styles import Font, PatternFill, Alignment
  20. from rest_framework.views import APIView
  21. from . import cache_config
  22. # --- Local imports ---
  23. from .models import (
  24. Product,
  25. ProductType,
  26. ProductAttribute,
  27. ProductAttributeValue,
  28. AttributePossibleValue,
  29. )
  30. from .serializers import (
  31. ProductSerializer,
  32. ProductTypeSerializer,
  33. ProductAttributeSerializer,
  34. AttributePossibleValueSerializer,
  35. SingleProductRequestSerializer,
  36. BatchProductRequestSerializer,
  37. ProductAttributeResultSerializer,
  38. BatchProductResponseSerializer,
  39. ProductAttributeValueSerializer,
  40. ProductAttributeValueInputSerializer,
  41. BulkProductAttributeValueSerializer,
  42. ProductWithAttributesSerializer,
  43. )
  44. from .services import ProductAttributeService
  45. from .ocr_service import OCRService
  46. from .visual_processing_service import VisualProcessingService
  47. # --- Configuration for Generated Outputs Folder ---
  48. OUTPUT_FOLDER_NAME = 'generated_outputs'
  49. OUTPUT_ROOT = os.path.join(settings.MEDIA_ROOT, OUTPUT_FOLDER_NAME)
  50. OUTPUT_URL = os.path.join(settings.MEDIA_URL, OUTPUT_FOLDER_NAME).replace('\\', '/') # Ensure forward slashes
  51. LOG_FILE_NAME = 'excel_generation.log'
  52. STATUS_FILE_NAME = 'excel_generation_status.json'
  53. EXCEL_FILE_NAME = 'generated_products.xlsx'
  54. LOG_FILE_PATH = os.path.join(OUTPUT_ROOT, LOG_FILE_NAME)
  55. STATUS_FILE_PATH = os.path.join(OUTPUT_ROOT, STATUS_FILE_NAME)
  56. EXCEL_FILE_PATH = os.path.join(OUTPUT_ROOT, EXCEL_FILE_NAME)
  57. # Ensure the output folder exists
  58. if not os.path.exists(OUTPUT_ROOT):
  59. os.makedirs(OUTPUT_ROOT)
  60. # Configure logging
  61. logging.basicConfig(
  62. filename=LOG_FILE_PATH,
  63. level=logging.INFO,
  64. format='%(asctime)s - %(levelname)s - %(message)s'
  65. )
  66. logger = logging.getLogger(__name__)
  67. class ExtractProductAttributesView(APIView):
  68. """
  69. API endpoint to extract product attributes for a single product by item_id.
  70. Fetches product details from database with source tracking.
  71. Returns attributes in array format: [{"value": "...", "source": "..."}]
  72. Includes OCR and Visual Processing results.
  73. """
  74. def post(self, request):
  75. serializer = SingleProductRequestSerializer(data=request.data)
  76. if not serializer.is_valid():
  77. return Response({"error": serializer.errors}, status=status.HTTP_400_BAD_REQUEST)
  78. validated_data = serializer.validated_data
  79. item_id = validated_data.get("item_id")
  80. # Fetch product from DB
  81. try:
  82. product = Product.objects.get(item_id=item_id)
  83. except Product.DoesNotExist:
  84. return Response(
  85. {"error": f"Product with item_id '{item_id}' not found."},
  86. status=status.HTTP_404_NOT_FOUND
  87. )
  88. # Extract product details
  89. title = product.product_name
  90. short_desc = product.product_short_description
  91. long_desc = product.product_long_description
  92. image_url = product.image_path
  93. # Process image for OCR if required
  94. ocr_results = None
  95. ocr_text = None
  96. visual_results = None
  97. if validated_data.get("process_image", True) and image_url:
  98. # OCR Processing
  99. ocr_service = OCRService()
  100. ocr_results = ocr_service.process_image(image_url)
  101. if ocr_results and ocr_results.get("detected_text"):
  102. ocr_attrs = ProductAttributeService.extract_attributes_from_ocr(
  103. ocr_results, validated_data.get("model")
  104. )
  105. ocr_results["extracted_attributes"] = ocr_attrs
  106. ocr_text = "\n".join([
  107. f"{item['text']} (confidence: {item['confidence']:.2f})"
  108. for item in ocr_results["detected_text"]
  109. ])
  110. # Visual Processing
  111. visual_service = VisualProcessingService()
  112. product_type_hint = product.product_type if hasattr(product, 'product_type') else None
  113. visual_results = visual_service.process_image(image_url, product_type_hint)
  114. # Combine all product text with source tracking
  115. product_text, source_map = ProductAttributeService.combine_product_text(
  116. title=title,
  117. short_desc=short_desc,
  118. long_desc=long_desc,
  119. ocr_text=ocr_text
  120. )
  121. # Extract attributes with enhanced features and source tracking
  122. result = ProductAttributeService.extract_attributes(
  123. product_text=product_text,
  124. mandatory_attrs=validated_data["mandatory_attrs"],
  125. source_map=source_map,
  126. model=validated_data.get("model"),
  127. extract_additional=validated_data.get("extract_additional", True),
  128. multiple=validated_data.get("multiple", []),
  129. threshold_abs=validated_data.get("threshold_abs", 0.65),
  130. margin=validated_data.get("margin", 0.15),
  131. use_dynamic_thresholds=validated_data.get("use_dynamic_thresholds", True),
  132. use_adaptive_margin=validated_data.get("use_adaptive_margin", True),
  133. use_semantic_clustering=validated_data.get("use_semantic_clustering", True)
  134. )
  135. # Attach OCR results if available
  136. if ocr_results:
  137. result["ocr_results"] = ocr_results
  138. # Attach Visual Processing results if available
  139. if visual_results:
  140. result["visual_results"] = visual_results
  141. response_serializer = ProductAttributeResultSerializer(data=result)
  142. if response_serializer.is_valid():
  143. return Response(response_serializer.data, status=status.HTTP_200_OK)
  144. return Response(result, status=status.HTTP_200_OK)
  145. # ==================== OPTIMIZED BATCH VIEW ====================
  146. class BatchExtractProductAttributesView(APIView):
  147. """
  148. ⚡ PERFORMANCE OPTIMIZED: Batch extraction with intelligent parallelization
  149. Expected performance: 10 products in 30-60 seconds (with image processing)
  150. """
  151. def post(self, request):
  152. import time
  153. start_time = time.time()
  154. serializer = BatchProductRequestSerializer(data=request.data)
  155. if not serializer.is_valid():
  156. return Response({"error": serializer.errors}, status=status.HTTP_400_BAD_REQUEST)
  157. validated_data = serializer.validated_data
  158. product_list = validated_data.get("products", [])
  159. logger.info(f"🚀 Starting batch processing for {len(product_list)} products")
  160. # ==================== OPTIMIZATION 1: Bulk DB Query ====================
  161. item_ids = [p['item_id'] for p in product_list]
  162. products_queryset = Product.objects.filter(
  163. item_id__in=item_ids
  164. ).prefetch_related('attribute_values')
  165. product_map = {product.item_id: product for product in products_queryset}
  166. # Prefetch ALL original attribute values in ONE query
  167. original_values_qs = ProductAttributeValue.objects.filter(
  168. product__item_id__in=item_ids
  169. ).select_related('product')
  170. original_values_map = {}
  171. for attr_val in original_values_qs:
  172. item_id = attr_val.product.item_id
  173. if item_id not in original_values_map:
  174. original_values_map[item_id] = {}
  175. original_values_map[item_id][attr_val.attribute_name] = attr_val.original_value
  176. logger.info(f"✓ Loaded {len(product_map)} products from database")
  177. # Extract settings
  178. model = validated_data.get("model")
  179. extract_additional = validated_data.get("extract_additional", True)
  180. process_image = validated_data.get("process_image", True)
  181. multiple = validated_data.get("multiple", [])
  182. threshold_abs = validated_data.get("threshold_abs", 0.65)
  183. margin = validated_data.get("margin", 0.15)
  184. use_dynamic_thresholds = validated_data.get("use_dynamic_thresholds", False)
  185. use_adaptive_margin = validated_data.get("use_adaptive_margin", False)
  186. use_semantic_clustering = validated_data.get("use_semantic_clustering", False)
  187. results = []
  188. successful = 0
  189. failed = 0
  190. # ==================== OPTIMIZATION 2: Conditional Service Init ====================
  191. # Only initialize if processing images
  192. ocr_service = None
  193. visual_service = None
  194. if process_image:
  195. from .ocr_service import OCRService
  196. from .visual_processing_service import VisualProcessingService
  197. ocr_service = OCRService()
  198. visual_service = VisualProcessingService()
  199. logger.info("✓ Image processing services initialized")
  200. # ==================== OPTIMIZATION 3: Smart Parallelization ====================
  201. def process_single_product(product_entry):
  202. """Process a single product (runs in parallel)"""
  203. import time
  204. product_start = time.time()
  205. item_id = product_entry['item_id']
  206. mandatory_attrs = product_entry['mandatory_attrs']
  207. if item_id not in product_map:
  208. return {
  209. "product_id": item_id,
  210. "error": "Product not found in database"
  211. }, False
  212. product = product_map[item_id]
  213. try:
  214. title = product.product_name
  215. short_desc = product.product_short_description
  216. long_desc = product.product_long_description
  217. image_url = product.image_path
  218. ocr_results = None
  219. ocr_text = None
  220. visual_results = None
  221. # ⚡ SKIP IMAGE PROCESSING IF DISABLED (HUGE TIME SAVER)
  222. if process_image and image_url:
  223. if ocr_service:
  224. ocr_results = ocr_service.process_image(image_url)
  225. if ocr_results and ocr_results.get("detected_text"):
  226. ocr_attrs = ProductAttributeService.extract_attributes_from_ocr(
  227. ocr_results, model
  228. )
  229. ocr_results["extracted_attributes"] = ocr_attrs
  230. ocr_text = "\n".join([
  231. f"{item['text']} (confidence: {item['confidence']:.2f})"
  232. for item in ocr_results["detected_text"]
  233. ])
  234. if visual_service:
  235. product_type_hint = product.product_type if hasattr(product, 'product_type') else None
  236. visual_results = visual_service.process_image(image_url, product_type_hint)
  237. if visual_results and visual_results.get('visual_attributes'):
  238. visual_results['visual_attributes'] = ProductAttributeService.format_visual_attributes(
  239. visual_results['visual_attributes']
  240. )
  241. # Combine product text with source tracking
  242. product_text, source_map = ProductAttributeService.combine_product_text(
  243. title=title,
  244. short_desc=short_desc,
  245. long_desc=long_desc,
  246. ocr_text=ocr_text
  247. )
  248. # ⚡ EXTRACT ATTRIBUTES WITH CACHING ENABLED
  249. extracted = ProductAttributeService.extract_attributes(
  250. product_text=product_text,
  251. mandatory_attrs=mandatory_attrs,
  252. source_map=source_map,
  253. model=model,
  254. extract_additional=extract_additional,
  255. multiple=multiple,
  256. # threshold_abs=threshold_abs,
  257. # margin=margin,
  258. # use_dynamic_thresholds=use_dynamic_thresholds,
  259. # use_adaptive_margin=use_adaptive_margin,
  260. # use_semantic_clustering=use_semantic_clustering,
  261. use_cache=True # ⚡ CRITICAL: Enable caching
  262. )
  263. # Add original values
  264. original_attrs = original_values_map.get(item_id, {})
  265. for attr_name, attr_values in extracted.get("mandatory", {}).items():
  266. if isinstance(attr_values, list):
  267. for attr_obj in attr_values:
  268. if isinstance(attr_obj, dict):
  269. attr_obj["original_value"] = original_attrs.get(attr_name, "")
  270. for attr_name, attr_values in extracted.get("additional", {}).items():
  271. if isinstance(attr_values, list):
  272. for attr_obj in attr_values:
  273. if isinstance(attr_obj, dict):
  274. attr_obj["original_value"] = original_attrs.get(attr_name, "")
  275. result = {
  276. "product_id": product.item_id,
  277. "mandatory": extracted.get("mandatory", {}),
  278. "additional": extracted.get("additional", {}),
  279. }
  280. if ocr_results:
  281. result["ocr_results"] = ocr_results
  282. if visual_results:
  283. result["visual_results"] = visual_results
  284. processing_time = time.time() - product_start
  285. logger.info(f"✓ Processed {item_id} in {processing_time:.2f}s")
  286. return result, True
  287. except Exception as e:
  288. logger.error(f"❌ Error processing {item_id}: {str(e)}")
  289. return {
  290. "product_id": item_id,
  291. "error": str(e)
  292. }, False
  293. # ==================== OPTIMIZATION 4: Parallel Execution ====================
  294. # Adjust workers based on whether image processing is enabled
  295. max_workers = min(3 if process_image else 10, len(product_list))
  296. logger.info(f"⚡ Using {max_workers} parallel workers")
  297. with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
  298. # Submit all tasks
  299. future_to_product = {
  300. executor.submit(process_single_product, product): product
  301. for product in product_list
  302. }
  303. # Collect results as they complete
  304. for future in concurrent.futures.as_completed(future_to_product):
  305. try:
  306. result, success = future.result()
  307. results.append(result)
  308. if success:
  309. successful += 1
  310. else:
  311. failed += 1
  312. except Exception as e:
  313. failed += 1
  314. logger.error(f"❌ Future execution error: {str(e)}")
  315. results.append({
  316. "product_id": "unknown",
  317. "error": str(e)
  318. })
  319. total_time = time.time() - start_time
  320. # Get cache statistics
  321. cache_stats = ProductAttributeService.get_cache_stats()
  322. logger.info(f"""
  323. 🎉 BATCH PROCESSING COMPLETE
  324. ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
  325. Total products: {len(product_list)}
  326. Successful: {successful}
  327. Failed: {failed}
  328. Total time: {total_time:.2f}s
  329. Avg time/product: {total_time/len(product_list):.2f}s
  330. Cache hit rate: {cache_stats['embedding_cache']['hit_rate_percent']:.1f}%
  331. ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
  332. """)
  333. batch_result = {
  334. "results": results,
  335. "total_products": len(product_list),
  336. "successful": successful,
  337. "failed": failed,
  338. "performance": {
  339. "total_time_seconds": round(total_time, 2),
  340. "avg_time_per_product": round(total_time / len(product_list), 2),
  341. "workers_used": max_workers
  342. },
  343. "cache_stats": cache_stats
  344. }
  345. response_serializer = BatchProductResponseSerializer(data=batch_result)
  346. if response_serializer.is_valid():
  347. return Response(response_serializer.data, status=status.HTTP_200_OK)
  348. return Response(batch_result, status=status.HTTP_200_OK)
  349. class ProductListView(APIView):
  350. """
  351. GET API to list all products with details
  352. """
  353. def get(self, request):
  354. products = Product.objects.all()
  355. serializer = ProductSerializer(products, many=True)
  356. return Response(serializer.data, status=status.HTTP_200_OK)
  357. # -------------------------------------------------------------------------------------------------
  358. def generate_product_excel_background():
  359. """
  360. Function to perform batch attribute extraction for all products and generate an Excel file.
  361. Runs in a background thread to avoid blocking the API response.
  362. Logs success/failure and saves a status file for external monitoring.
  363. """
  364. logger.info(f"[{datetime.now().isoformat()}] Starting background product Excel generation and attribute extraction.")
  365. successful = 0
  366. failed = 0
  367. results = [] # To store detailed extraction results for Excel sheet 2
  368. # Function to write status file (SUCCESS/FAILED)
  369. def write_status(status_type, error_msg=None):
  370. status_data = {
  371. "status": status_type,
  372. "timestamp": datetime.now().isoformat(),
  373. "products_processed": successful + failed,
  374. "products_successful": successful,
  375. "products_failed": failed,
  376. # FIX: Use the updated EXCEL_FILE_NAME and OUTPUT_URL
  377. "excel_path": os.path.join(OUTPUT_URL, EXCEL_FILE_NAME) if status_type == "SUCCESS" else None,
  378. "log_path": os.path.join(OUTPUT_URL, LOG_FILE_NAME),
  379. "error_message": error_msg
  380. }
  381. try:
  382. # FIX: STATUS_FILE_PATH is now inside generated_outputs
  383. with open(STATUS_FILE_PATH, 'w') as f:
  384. json.dump(status_data, f, indent=4)
  385. except Exception as e:
  386. logger.exception(f"CRITICAL ERROR: Failed to write status file at {STATUS_FILE_PATH}: {e}")
  387. try:
  388. # 1. PREFETCH all necessary related data to minimize database queries
  389. # Prefetch possible values for mandatory attributes
  390. possible_values_prefetch = Prefetch(
  391. 'attributes',
  392. queryset=ProductAttribute.objects.filter(is_mandatory=True).prefetch_related('possible_values')
  393. )
  394. # Fetch all ProductTypes with their mandatory attributes and possible values
  395. all_product_types = ProductType.objects.prefetch_related(possible_values_prefetch)
  396. product_type_map = {
  397. pt.name: pt for pt in all_product_types
  398. }
  399. # Prepare product_list for batch extraction
  400. all_products = Product.objects.all()
  401. product_list = []
  402. for p in all_products:
  403. mandatory_attrs_dict = {}
  404. product_type_name = p.product_type.strip() if p.product_type else None
  405. if product_type_name and product_type_name in product_type_map:
  406. pt = product_type_map[product_type_name]
  407. # Build the mandatory_attrs dictionary: { "Attribute Name": ["Value 1", "Value 2"], ... }
  408. for attr in pt.attributes.all():
  409. mandatory_attrs_dict[attr.name] = [
  410. pv.value for pv in attr.possible_values.all()
  411. ]
  412. product_list.append({
  413. "item_id": p.item_id,
  414. "product_type_name": product_type_name,
  415. "mandatory_attrs": mandatory_attrs_dict
  416. })
  417. # Batch settings (using defaults)
  418. model = "llama-3.1-8b-instant"
  419. extract_additional = True
  420. process_image = False
  421. multiple = []
  422. threshold_abs = 0.65
  423. margin = 0.15
  424. use_dynamic_thresholds = True
  425. use_adaptive_margin = True
  426. use_semantic_clustering = True
  427. # Batch extraction logic
  428. item_ids = [p['item_id'] for p in product_list]
  429. products_queryset = Product.objects.filter(item_id__in=item_ids)
  430. product_map = {product.item_id: product for product in products_queryset}
  431. found_ids = set(product_map.keys())
  432. for product_entry in product_list:
  433. item_id = product_entry['item_id']
  434. mandatory_attrs = product_entry['mandatory_attrs']
  435. if item_id not in found_ids:
  436. failed += 1
  437. results.append({
  438. "product_id": item_id,
  439. "error": "Product not found in database"
  440. })
  441. logger.warning(f"Product {item_id} not found in database. Skipping extraction.")
  442. continue
  443. product = product_map[item_id]
  444. try:
  445. title = product.product_name
  446. short_desc = product.product_short_description
  447. long_desc = product.product_long_description
  448. image_url = product.image_path
  449. ocr_results = None
  450. ocr_text = None
  451. visual_results = None
  452. if process_image and image_url:
  453. logger.info(f"Processing image for product {item_id}...")
  454. # OCR Processing
  455. ocr_service = OCRService()
  456. ocr_results = ocr_service.process_image(image_url)
  457. if ocr_results and ocr_results.get("detected_text"):
  458. ocr_attrs = ProductAttributeService.extract_attributes_from_ocr(
  459. ocr_results, model
  460. )
  461. ocr_results["extracted_attributes"] = ocr_attrs
  462. ocr_text = "\n".join([
  463. f"{item['text']} (confidence: {item['confidence']:.2f})"
  464. for item in ocr_results["detected_text"]
  465. ])
  466. # Visual Processing
  467. visual_service = VisualProcessingService()
  468. product_type_hint = product.product_type if product.product_type else None
  469. visual_results = visual_service.process_image(image_url, product_type_hint)
  470. if visual_results and visual_results.get('visual_attributes'):
  471. visual_results['visual_attributes'] = ProductAttributeService.format_visual_attributes(
  472. visual_results['visual_attributes']
  473. )
  474. logger.info(f"Image processing done for product {item_id}.")
  475. # Combine product text with source tracking
  476. product_text, source_map = ProductAttributeService.combine_product_text(
  477. title=title,
  478. short_desc=short_desc,
  479. long_desc=long_desc,
  480. ocr_text=ocr_text
  481. )
  482. # Attribute Extraction with source tracking
  483. extracted = ProductAttributeService.extract_attributes(
  484. product_text=product_text,
  485. mandatory_attrs=mandatory_attrs,
  486. source_map=source_map,
  487. model=model,
  488. extract_additional=extract_additional,
  489. multiple=multiple,
  490. threshold_abs=threshold_abs,
  491. margin=margin,
  492. use_dynamic_thresholds=use_dynamic_thresholds,
  493. use_adaptive_margin=use_adaptive_margin,
  494. use_semantic_clustering=use_semantic_clustering
  495. )
  496. result = {
  497. "product_id": item_id,
  498. "mandatory": extracted.get("mandatory", {}),
  499. "additional": extracted.get("additional", {}),
  500. }
  501. if ocr_results:
  502. result["ocr_results"] = ocr_results
  503. if visual_results:
  504. result["visual_results"] = visual_results
  505. results.append(result)
  506. successful += 1
  507. logger.info(f"Attribute extraction successful for product {item_id}.")
  508. except Exception as e:
  509. failed += 1
  510. results.append({
  511. "product_id": item_id,
  512. "error": str(e)
  513. })
  514. logger.exception(f"Error during attribute extraction for product {item_id}.")
  515. logger.info(f"Batch extraction phase complete. Successful: {successful}, Failed: {failed}")
  516. # --------------------------------------------------------------------------------
  517. # Generate and save the Excel file
  518. # --------------------------------------------------------------------------------
  519. wb = Workbook()
  520. # Sheet 1: Products (from DB) (Logic is the same, skipped for brevity)
  521. ws_products = wb.active
  522. ws_products.title = "Products"
  523. products_headers = ['ITEM ID', 'PRODUCT NAME', 'PRODUCT TYPE', 'Product Short Description', 'Product Long Description', 'image_path']
  524. header_fill = PatternFill(start_color="366092", end_color="366092", fill_type="solid")
  525. header_font = Font(bold=True, color="FFFFFF")
  526. for col_num, header in enumerate(products_headers, 1):
  527. cell = ws_products.cell(row=1, column=col_num)
  528. cell.value = header
  529. cell.fill = header_fill
  530. cell.font = header_font
  531. cell.alignment = Alignment(horizontal="center", vertical="center")
  532. all_products_db = Product.objects.all()
  533. for row_num, product in enumerate(all_products_db, 2):
  534. ws_products.cell(row=row_num, column=1, value=product.item_id)
  535. ws_products.cell(row=row_num, column=2, value=product.product_name)
  536. ws_products.cell(row=row_num, column=3, value=product.product_type)
  537. ws_products.cell(row=row_num, column=4, value=product.product_short_description)
  538. ws_products.cell(row=row_num, column=5, value=product.product_long_description)
  539. ws_products.cell(row=row_num, column=6, value=product.image_path)
  540. for col_dim, width in zip(['A', 'B', 'C', 'D', 'E', 'F'], [15, 25, 15, 35, 50, 45]):
  541. ws_products.column_dimensions[col_dim].width = width
  542. # Sheet 2: Attribute_values (Logic is the same, skipped for brevity)
  543. ws_attributes = wb.create_sheet("Attribute_values")
  544. attributes_headers = ['item_id', 'attribute_name', 'original_value', 'generated_value']
  545. for col_num, header in enumerate(attributes_headers, 1):
  546. cell = ws_attributes.cell(row=1, column=col_num)
  547. cell.value = header
  548. cell.fill = header_fill
  549. cell.font = header_font
  550. cell.alignment = Alignment(horizontal="center", vertical="center")
  551. row_num = 2
  552. all_original_attrs = ProductAttributeValue.objects.all()
  553. original_attrs_lookup = {
  554. (attr.product.item_id, attr.attribute_name): attr.original_value
  555. for attr in all_original_attrs
  556. }
  557. processed_original_keys = set()
  558. for res in results:
  559. # ... (Excel writing logic for generated/original attributes remains unchanged)
  560. item_id = res["product_id"]
  561. if "error" in res:
  562. for (orig_item_id, orig_attr_name), orig_value in original_attrs_lookup.items():
  563. if orig_item_id == item_id:
  564. ws_attributes.cell(row=row_num, column=1, value=orig_item_id)
  565. ws_attributes.cell(row=row_num, column=2, value=orig_attr_name)
  566. ws_attributes.cell(row=row_num, column=3, value=orig_value)
  567. ws_attributes.cell(row=row_num, column=4, value=f"Extraction Failed: {res['error']}")
  568. processed_original_keys.add((orig_item_id, orig_attr_name))
  569. row_num += 1
  570. continue
  571. generated_attrs = {}
  572. for cat in ["mandatory", "additional"]:
  573. attrs = res.get(cat, {})
  574. for attr_name, values in attrs.items():
  575. for val in values:
  576. key = (item_id, attr_name)
  577. if key not in generated_attrs:
  578. generated_attrs[key] = []
  579. generated_attrs[key].append(f"{val['value']} (source: {val['source']})")
  580. ocr = res.get("ocr_results")
  581. if ocr and "extracted_attributes" in ocr and isinstance(ocr["extracted_attributes"], dict):
  582. for attr_name, values in ocr["extracted_attributes"].items():
  583. for val in values:
  584. key = (item_id, attr_name)
  585. if key not in generated_attrs:
  586. generated_attrs[key] = []
  587. generated_attrs[key].append(f"{val['value']} (source: {val['source']})")
  588. visual = res.get("visual_results")
  589. if visual and "visual_attributes" in visual:
  590. vis_attrs = visual["visual_attributes"]
  591. if isinstance(vis_attrs, dict):
  592. for attr_name, values in vis_attrs.items():
  593. if not isinstance(values, list):
  594. values = [{"value": values, "source": "visual"}]
  595. for val in values:
  596. key = (item_id, attr_name)
  597. if key not in generated_attrs:
  598. generated_attrs[key] = []
  599. generated_attrs[key].append(f"{val['value']} (source: {val.get('source', 'visual')})")
  600. elif isinstance(vis_attrs, list):
  601. for item in vis_attrs:
  602. attr_name = item.get("attribute_name") or item.get("name")
  603. if not attr_name: continue
  604. value = item.get("value", "")
  605. source = item.get("source", "visual")
  606. key = (item_id, attr_name)
  607. if key not in generated_attrs:
  608. generated_attrs[key] = []
  609. generated_attrs[key].append(f"{value} (source: {source})")
  610. for (attr_item_id, attr_name), gen_values in generated_attrs.items():
  611. original_value = original_attrs_lookup.get((attr_item_id, attr_name), "")
  612. generated_value = "; ".join(gen_values) if gen_values else ""
  613. ws_attributes.cell(row=row_num, column=1, value=attr_item_id)
  614. ws_attributes.cell(row=row_num, column=2, value=attr_name)
  615. ws_attributes.cell(row=row_num, column=3, value=original_value)
  616. ws_attributes.cell(row=row_num, column=4, value=generated_value)
  617. processed_original_keys.add((attr_item_id, attr_name))
  618. row_num += 1
  619. for (orig_item_id, orig_attr_name), orig_value in original_attrs_lookup.items():
  620. if orig_item_id == item_id and (orig_item_id, orig_attr_name) not in processed_original_keys:
  621. ws_attributes.cell(row=row_num, column=1, value=orig_item_id)
  622. ws_attributes.cell(row=row_num, column=2, value=orig_attr_name)
  623. ws_attributes.cell(row=row_num, column=3, value=orig_value)
  624. ws_attributes.cell(row=row_num, column=4, value="")
  625. processed_original_keys.add((orig_item_id, orig_attr_name))
  626. row_num += 1
  627. for (orig_item_id, orig_attr_name), orig_value in original_attrs_lookup.items():
  628. if (orig_item_id, orig_attr_name) not in processed_original_keys:
  629. ws_attributes.cell(row=row_num, column=1, value=orig_item_id)
  630. ws_attributes.cell(row=row_num, column=2, value=orig_attr_name)
  631. ws_attributes.cell(row=row_num, column=3, value=orig_value)
  632. ws_attributes.cell(row=row_num, column=4, value="Original value only (Product not processed in batch)")
  633. row_num += 1
  634. for col_dim, width in zip(['A', 'B', 'C', 'D'], [15, 35, 50, 50]):
  635. ws_attributes.column_dimensions[col_dim].width = width
  636. # FIX: Save to the new EXCEL_FILE_PATH
  637. wb.save(EXCEL_FILE_PATH)
  638. logger.info(f"Excel file successfully saved to {EXCEL_FILE_PATH}")
  639. # Write SUCCESS status
  640. write_status("SUCCESS")
  641. logger.info("Background task finished successfully.")
  642. except Exception as e:
  643. # Log the critical error and write FAILED status
  644. logger.exception("CRITICAL ERROR during background Excel generation process.")
  645. write_status("FAILED", error_msg=str(e))
  646. # -------------------------------------------------------------------------------------------------
class ProductUploadExcelView(APIView):
    """
    POST API to upload an Excel file.

    Expects multipart form data with a ``file`` field containing a workbook:
      * mandatory ``Products`` sheet (item_id, product_name, product_type,
        product_short_description, product_long_description, image_path)
      * optional ``Attribute_values`` sheet (item_id, attribute_name,
        original_value)

    Rows are upserted inside a single transaction. Only when EVERY row
    succeeds is the background Excel regeneration thread started.
    Returns 201 on a clean upload, 207 when some rows failed, 400 on a
    malformed workbook, 500 on unexpected errors.
    """
    # Accept multipart/form-data (and plain form) uploads.
    parser_classes = (MultiPartParser, FormParser)

    def post(self, request, *args, **kwargs):
        """Validate the workbook, upsert products/attributes, trigger export."""
        file_obj = request.FILES.get('file')
        if not file_obj:
            return Response({'error': 'No file provided'}, status=status.HTTP_400_BAD_REQUEST)
        try:
            # ... (Upload and DB processing logic remains unchanged)
            # Read all sheets from Excel file
            excel_file = pd.ExcelFile(file_obj)
            # Check if required sheets exist
            if 'Products' not in excel_file.sheet_names:
                logger.error(f"Upload failed: Missing 'Products' sheet in file.")
                return Response({
                    'error': "Missing 'Products' sheet",
                    'available_sheets': excel_file.sheet_names
                }, status=status.HTTP_400_BAD_REQUEST)
            df_products = pd.read_excel(excel_file, sheet_name='Products')
            # Normalize headers (trim, lowercase, spaces -> underscores) so the
            # template's display headers like 'ITEM ID' match 'item_id'.
            df_products.columns = [c.strip().lower().replace(' ', '_') for c in df_products.columns]
            expected_product_cols = {
                'item_id', 'product_name', 'product_long_description',
                'product_short_description', 'product_type', 'image_path'
            }
            if not expected_product_cols.issubset(df_products.columns):
                logger.error(f"Upload failed: Missing required columns in Products sheet.")
                return Response({
                    'error': 'Missing required columns in Products sheet',
                    'required_columns': list(expected_product_cols),
                    'found_columns': list(df_products.columns)
                }, status=status.HTTP_400_BAD_REQUEST)
            # The attributes sheet is optional; validate it only when present.
            df_attributes = None
            has_attributes_sheet = 'Attribute_values' in excel_file.sheet_names
            if has_attributes_sheet:
                df_attributes = pd.read_excel(excel_file, sheet_name='Attribute_values')
                df_attributes.columns = [c.strip().lower().replace(' ', '_') for c in df_attributes.columns]
                expected_attr_cols = {'item_id', 'attribute_name', 'original_value'}
                if not expected_attr_cols.issubset(df_attributes.columns):
                    logger.error(f"Upload failed: Missing required columns in Attribute_values sheet.")
                    return Response({
                        'error': 'Missing required columns in Attribute_values sheet',
                        'required_columns': list(expected_attr_cols),
                        'found_columns': list(df_attributes.columns)
                    }, status=status.HTTP_400_BAD_REQUEST)
            # Per-entity counters reported back to the caller.
            products_created = 0
            products_updated = 0
            attributes_created = 0
            attributes_updated = 0
            products_failed = 0
            attributes_failed = 0
            errors = []
            # All DB writes happen atomically; per-row failures are recorded
            # rather than raised so one bad row does not abort the upload.
            with transaction.atomic():
                for idx, row in df_products.iterrows():
                    item_id = str(row.get('item_id', '')).strip()
                    product_type = str(row.get('product_type', '')).strip()
                    if not item_id:
                        products_failed += 1
                        # idx is 0-based and row 1 is the header, hence +2.
                        errors.append(f"Products Row {idx + 2}: Missing item_id")
                        continue
                    try:
                        if product_type:
                            # Ensure the referenced type exists before upserting.
                            ProductType.objects.get_or_create(name=product_type)
                        defaults = {
                            'product_name': str(row.get('product_name', '')),
                            'product_long_description': str(row.get('product_long_description', '')),
                            'product_short_description': str(row.get('product_short_description', '')),
                            'product_type': product_type,
                            'image_path': str(row.get('image_path', '')),
                        }
                        obj, created = Product.objects.update_or_create(item_id=item_id, defaults=defaults)
                        if created: products_created += 1
                        else: products_updated += 1
                    except Exception as e:
                        products_failed += 1
                        errors.append(f"Products Row {idx + 2} (item_id: {item_id}): {str(e)}")
                        logger.error(f"Error processing product {item_id} in Products sheet: {e}")
                if has_attributes_sheet and df_attributes is not None:
                    # Prefetch every referenced product in one query; rows in the
                    # attributes sheet must point at an existing/just-created product.
                    item_ids_in_attrs = df_attributes['item_id'].astype(str).unique()
                    existing_products = {p.item_id: p for p in Product.objects.filter(item_id__in=item_ids_in_attrs)}
                    for idx, row in df_attributes.iterrows():
                        item_id = str(row.get('item_id', '')).strip()
                        attribute_name = str(row.get('attribute_name', '')).strip()
                        original_value = str(row.get('original_value', '')).strip()
                        if not item_id or not attribute_name:
                            attributes_failed += 1
                            errors.append(f"Attribute_values Row {idx + 2}: Missing item_id or attribute_name")
                            continue
                        product = existing_products.get(item_id)
                        if not product:
                            attributes_failed += 1
                            errors.append(f"Attribute_values Row {idx + 2}: Product with item_id '{item_id}' not found. Make sure it exists in Products sheet.")
                            continue
                        try:
                            attr_obj, created = ProductAttributeValue.objects.update_or_create(
                                product=product,
                                attribute_name=attribute_name,
                                defaults={'original_value': original_value}
                            )
                            if created: attributes_created += 1
                            else: attributes_updated += 1
                        except Exception as e:
                            attributes_failed += 1
                            errors.append(f"Attribute_values Row {idx + 2} (item_id: {item_id}, attribute: {attribute_name}): {str(e)}")
                            logger.error(f"Error processing attribute {attribute_name} for product {item_id}: {e}")
            # Prepare response data
            response_data = {
                'message': 'Upload completed',
                'products': {
                    'created': products_created, 'updated': products_updated, 'failed': products_failed,
                    'total_processed': products_created + products_updated + products_failed
                },
                'attribute_values': {
                    'created': attributes_created, 'updated': attributes_updated, 'failed': attributes_failed,
                    'total_processed': attributes_created + attributes_updated + attributes_failed
                } if has_attributes_sheet else {'message': 'Attribute_values sheet not found in Excel file'},
                'generated_excel_status': 'Excel generation started in the background.'
            }
            if errors:
                # Cap the error list at 50 entries to keep the payload bounded.
                response_data['errors'] = errors[:50]
                if len(errors) > 50:
                    response_data['errors'].append(f"... and {len(errors) - 50} more errors")
            upload_status = status.HTTP_201_CREATED if products_failed == 0 and attributes_failed == 0 else status.HTTP_207_MULTI_STATUS
            # Start background thread for Excel generation if upload was successful
            if products_failed == 0 and attributes_failed == 0:
                logger.info("API call successful. Triggering background Excel generation thread.")
                # Daemon thread: generation must not block the HTTP response.
                threading.Thread(target=generate_product_excel_background, daemon=True).start()
                # FIX: Update monitoring URLs to point to the new generated_outputs subfolder
                response_data['generated_excel_status'] = 'Background Excel generation triggered successfully.'
                response_data['monitoring'] = {
                    'excel_file': os.path.join(OUTPUT_URL, EXCEL_FILE_NAME),
                    'status_file': os.path.join(OUTPUT_URL, STATUS_FILE_NAME),
                    'log_file': os.path.join(OUTPUT_URL, LOG_FILE_NAME),
                    'note': 'These files will be available once the background process completes.'
                }
            else:
                logger.warning(f"API call finished with errors ({products_failed} products, {attributes_failed} attributes). Not triggering background excel generation.")
                response_data['generated_excel_status'] = 'Background Excel generation was NOT triggered due to upload errors. Fix upload errors and re-upload.'
            return Response(response_data, status=upload_status)
        except pd.errors.EmptyDataError:
            logger.error('The uploaded Excel file is empty or invalid.')
            return Response({'error': 'The uploaded Excel file is empty or invalid'}, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            logger.exception(f'An unexpected error occurred while processing the file.')
            return Response({'error': f'An unexpected error occurred while processing the file: {str(e)}'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
  793. class DownloadExcelTemplateView(APIView):
  794. """
  795. GET API to download an Excel template with two sheets:
  796. 1. Products sheet with sample data
  797. 2. Attribute_values sheet with sample data
  798. """
  799. def get(self, request):
  800. # Create a new workbook
  801. wb = Workbook()
  802. # Remove default sheet
  803. if 'Sheet' in wb.sheetnames:
  804. wb.remove(wb['Sheet'])
  805. # ===== Create Products Sheet =====
  806. ws_products = wb.create_sheet("Products", 0)
  807. # Define headers for Products
  808. products_headers = [
  809. 'ITEM ID',
  810. 'PRODUCT NAME',
  811. 'PRODUCT TYPE',
  812. 'Product Short Description',
  813. 'Product Long Description',
  814. 'image_path'
  815. ]
  816. # Style for headers
  817. header_fill = PatternFill(start_color="366092", end_color="366092", fill_type="solid")
  818. header_font = Font(bold=True, color="FFFFFF")
  819. # Add headers to Products sheet
  820. for col_num, header in enumerate(products_headers, 1):
  821. cell = ws_products.cell(row=1, column=col_num)
  822. cell.value = header
  823. cell.fill = header_fill
  824. cell.font = header_font
  825. cell.alignment = Alignment(horizontal="center", vertical="center")
  826. # Add sample data to Products sheet
  827. sample_products = [
  828. [
  829. '3217373735',
  830. 'Blue V-Neck T-Shirt',
  831. 'Clothing',
  832. 'Stylish blue t-shirt with v-neck design',
  833. 'Premium quality cotton t-shirt featuring a classic v-neck design. Perfect for casual wear. Available in vibrant blue color.',
  834. 'https://images.unsplash.com/photo-1521572163474-6864f9cf17ab'
  835. ],
  836. [
  837. '1234567890',
  838. 'Red Cotton Dress',
  839. 'Clothing',
  840. 'Beautiful red dress for special occasions',
  841. 'Elegant red dress made from 100% cotton fabric. Features a flowing design perfect for summer events and parties.',
  842. 'https://images.unsplash.com/photo-1595777457583-95e059d581b8'
  843. ],
  844. [
  845. '9876543210',
  846. 'Steel Screws Pack',
  847. 'Hardware',
  848. 'Pack of zinc plated steel screws',
  849. 'Professional grade steel screws with zinc plating for corrosion resistance. Pack contains 50 pieces, 2 inch length, M6 thread size.',
  850. 'https://images.unsplash.com/photo-1542272604-787c3835535d'
  851. ]
  852. ]
  853. for row_num, row_data in enumerate(sample_products, 2):
  854. for col_num, value in enumerate(row_data, 1):
  855. ws_products.cell(row=row_num, column=col_num, value=value)
  856. # Adjust column widths for Products sheet
  857. ws_products.column_dimensions['A'].width = 15 # ITEM ID
  858. ws_products.column_dimensions['B'].width = 25 # PRODUCT NAME
  859. ws_products.column_dimensions['C'].width = 15 # PRODUCT TYPE
  860. ws_products.column_dimensions['D'].width = 35 # Short Description
  861. ws_products.column_dimensions['E'].width = 50 # Long Description
  862. ws_products.column_dimensions['F'].width = 45 # image_path
  863. # ===== Create Attribute_values Sheet =====
  864. ws_attributes = wb.create_sheet("Attribute_values", 1)
  865. # Define headers for Attribute_values
  866. attributes_headers = ['item_id', 'attribute_name', 'original_value']
  867. # Add headers to Attribute_values sheet
  868. for col_num, header in enumerate(attributes_headers, 1):
  869. cell = ws_attributes.cell(row=1, column=col_num)
  870. cell.value = header
  871. cell.fill = header_fill
  872. cell.font = header_font
  873. cell.alignment = Alignment(horizontal="center", vertical="center")
  874. # Add sample data to Attribute_values sheet
  875. sample_attributes = [
  876. ['3217373735', 'Clothing Neck Style', 'V-Neck Square'],
  877. ['3217373735', 'Condition', 'New with tags'],
  878. ['3217373735', 'Material', '100% Cotton'],
  879. ['3217373735', 'Color', 'Sky Blue'],
  880. ['3217373735', 'Size', 'Medium'],
  881. ['1234567890', 'Sleeve Length', 'Sleeveless'],
  882. ['1234567890', 'Condition', 'Brand New'],
  883. ['1234567890', 'Pattern', 'Solid'],
  884. ['1234567890', 'Material', 'Cotton Blend'],
  885. ['1234567890', 'Color', 'Crimson Red'],
  886. ['9876543210', 'Material', 'Stainless Steel'],
  887. ['9876543210', 'Thread Size', 'M6'],
  888. ['9876543210', 'Length', '2 inches'],
  889. ['9876543210', 'Coating', 'Zinc Plated'],
  890. ['9876543210', 'Package Quantity', '50 pieces'],
  891. ]
  892. for row_num, row_data in enumerate(sample_attributes, 2):
  893. for col_num, value in enumerate(row_data, 1):
  894. ws_attributes.cell(row=row_num, column=col_num, value=value)
  895. # Adjust column widths for Attribute_values sheet
  896. ws_attributes.column_dimensions['A'].width = 15 # item_id
  897. ws_attributes.column_dimensions['B'].width = 25 # attribute_name
  898. ws_attributes.column_dimensions['C'].width = 30 # original_value
  899. # Add instructions sheet
  900. ws_instructions = wb.create_sheet("Instructions", 2)
  901. instructions_text = [
  902. ['Excel Upload Instructions', ''],
  903. ['', ''],
  904. ['Sheet 1: Products', ''],
  905. ['- Contains product basic information', ''],
  906. ['- All columns are required', ''],
  907. ['- ITEM ID must be unique', ''],
  908. ['', ''],
  909. ['Sheet 2: Attribute_values', ''],
  910. ['- Contains original/manual attribute values', ''],
  911. ['- item_id must match an ITEM ID from Products sheet', ''],
  912. ['- Multiple rows can have the same item_id (for different attributes)', ''],
  913. ['- Each attribute per product should be on a separate row', ''],
  914. ['', ''],
  915. ['Upload Process:', ''],
  916. ['1. Fill in your product data in the Products sheet', ''],
  917. ['2. Fill in attribute values in the Attribute_values sheet', ''],
  918. ['3. Ensure item_id values match between both sheets', ''],
  919. ['4. Save the file and upload via API', ''],
  920. ['', ''],
  921. ['Notes:', ''],
  922. ['- Do not change sheet names (must be "Products" and "Attribute_values")', ''],
  923. ['- Do not change column header names', ''],
  924. ['- You can delete the sample data rows', ''],
  925. ['- You can delete this Instructions sheet before uploading', ''],
  926. ]
  927. for row_num, row_data in enumerate(instructions_text, 1):
  928. ws_instructions.cell(row=row_num, column=1, value=row_data[0])
  929. if row_num == 1:
  930. cell = ws_instructions.cell(row=row_num, column=1)
  931. cell.font = Font(bold=True, size=14)
  932. ws_instructions.column_dimensions['A'].width = 60
  933. # Save to BytesIO
  934. output = io.BytesIO()
  935. wb.save(output)
  936. output.seek(0)
  937. # Create response
  938. response = HttpResponse(
  939. output.getvalue(),
  940. content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
  941. )
  942. response['Content-Disposition'] = 'attachment; filename=product_upload_template.xlsx'
  943. return response
  944. class DownloadProductsWithAttributesExcelView(APIView):
  945. """
  946. GET API to download existing products with their attribute values as Excel.
  947. Useful for users to update existing data.
  948. """
  949. def get(self, request):
  950. from .models import Product, ProductAttributeValue
  951. # Create workbook
  952. wb = Workbook()
  953. if 'Sheet' in wb.sheetnames:
  954. wb.remove(wb['Sheet'])
  955. # ===== Products Sheet =====
  956. ws_products = wb.create_sheet("Products", 0)
  957. # Headers
  958. products_headers = [
  959. 'ITEM ID', 'PRODUCT NAME', 'PRODUCT TYPE',
  960. 'Product Short Description', 'Product Long Description', 'image_path'
  961. ]
  962. header_fill = PatternFill(start_color="366092", end_color="366092", fill_type="solid")
  963. header_font = Font(bold=True, color="FFFFFF")
  964. for col_num, header in enumerate(products_headers, 1):
  965. cell = ws_products.cell(row=1, column=col_num)
  966. cell.value = header
  967. cell.fill = header_fill
  968. cell.font = header_font
  969. cell.alignment = Alignment(horizontal="center", vertical="center")
  970. # Fetch and add product data
  971. products = Product.objects.all()
  972. for row_num, product in enumerate(products, 2):
  973. ws_products.cell(row=row_num, column=1, value=product.item_id)
  974. ws_products.cell(row=row_num, column=2, value=product.product_name)
  975. ws_products.cell(row=row_num, column=3, value=product.product_type)
  976. ws_products.cell(row=row_num, column=4, value=product.product_short_description)
  977. ws_products.cell(row=row_num, column=5, value=product.product_long_description)
  978. ws_products.cell(row=row_num, column=6, value=product.image_path)
  979. # Adjust widths
  980. ws_products.column_dimensions['A'].width = 15
  981. ws_products.column_dimensions['B'].width = 25
  982. ws_products.column_dimensions['C'].width = 15
  983. ws_products.column_dimensions['D'].width = 35
  984. ws_products.column_dimensions['E'].width = 50
  985. ws_products.column_dimensions['F'].width = 45
  986. # ===== Attribute_values Sheet =====
  987. ws_attributes = wb.create_sheet("Attribute_values", 1)
  988. attributes_headers = ['item_id', 'attribute_name', 'original_value']
  989. for col_num, header in enumerate(attributes_headers, 1):
  990. cell = ws_attributes.cell(row=1, column=col_num)
  991. cell.value = header
  992. cell.fill = header_fill
  993. cell.font = header_font
  994. cell.alignment = Alignment(horizontal="center", vertical="center")
  995. # Fetch and add attribute values
  996. attributes = ProductAttributeValue.objects.select_related('product').all()
  997. for row_num, attr in enumerate(attributes, 2):
  998. ws_attributes.cell(row=row_num, column=1, value=attr.product.item_id)
  999. ws_attributes.cell(row=row_num, column=2, value=attr.attribute_name)
  1000. ws_attributes.cell(row=row_num, column=3, value=attr.original_value)
  1001. ws_attributes.column_dimensions['A'].width = 15
  1002. ws_attributes.column_dimensions['B'].width = 25
  1003. ws_attributes.column_dimensions['C'].width = 30
  1004. # Save to BytesIO
  1005. output = io.BytesIO()
  1006. wb.save(output)
  1007. output.seek(0)
  1008. response = HttpResponse(
  1009. output.getvalue(),
  1010. content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
  1011. )
  1012. response['Content-Disposition'] = 'attachment; filename=products_export.xlsx'
  1013. return response
  1014. class ProductAttributesUploadView(APIView):
  1015. """
  1016. POST API to upload an Excel file and add mandatory/additional attributes
  1017. for product types with possible values.
  1018. """
  1019. parser_classes = (MultiPartParser, FormParser)
  1020. def post(self, request):
  1021. file_obj = request.FILES.get('file')
  1022. if not file_obj:
  1023. return Response({"error": "No file provided."}, status=status.HTTP_400_BAD_REQUEST)
  1024. try:
  1025. df = pd.read_excel(file_obj)
  1026. required_columns = {'product_type', 'attribute_name', 'is_mandatory', 'possible_values'}
  1027. if not required_columns.issubset(df.columns):
  1028. return Response({
  1029. "error": f"Missing required columns. Found: {list(df.columns)}"
  1030. }, status=status.HTTP_400_BAD_REQUEST)
  1031. for _, row in df.iterrows():
  1032. product_type_name = str(row['product_type']).strip()
  1033. attr_name = str(row['attribute_name']).strip()
  1034. is_mandatory = str(row['is_mandatory']).strip().lower() in ['yes', 'true', '1']
  1035. possible_values = str(row.get('possible_values', '')).strip()
  1036. # Get or create product type
  1037. product_type, _ = ProductType.objects.get_or_create(name=product_type_name)
  1038. # Get or create attribute
  1039. attribute, _ = ProductAttribute.objects.get_or_create(
  1040. product_type=product_type,
  1041. name=attr_name,
  1042. defaults={'is_mandatory': is_mandatory}
  1043. )
  1044. attribute.is_mandatory = is_mandatory
  1045. attribute.save()
  1046. # Handle possible values
  1047. AttributePossibleValue.objects.filter(attribute=attribute).delete()
  1048. if possible_values:
  1049. for val in [v.strip() for v in possible_values.split(',') if v.strip()]:
  1050. AttributePossibleValue.objects.create(attribute=attribute, value=val)
  1051. return Response({"message": "Attributes uploaded successfully."}, status=status.HTTP_201_CREATED)
  1052. except Exception as e:
  1053. return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
  1054. class ProductTypeAttributesView(APIView):
  1055. """
  1056. API to view, create, update, and delete product type attributes and their possible values.
  1057. Also supports dynamic product type creation.
  1058. """
  1059. def get(self, request):
  1060. """
  1061. Retrieve all product types with their attributes and possible values.
  1062. """
  1063. product_types = ProductType.objects.all()
  1064. serializer = ProductTypeSerializer(product_types, many=True)
  1065. # Transform the serialized data into the requested format
  1066. result = []
  1067. for pt in serializer.data:
  1068. for attr in pt['attributes']:
  1069. result.append({
  1070. 'product_type': pt['name'],
  1071. 'attribute_name': attr['name'],
  1072. 'is_mandatory': 'Yes' if attr['is_mandatory'] else 'No',
  1073. 'possible_values': ', '.join([pv['value'] for pv in attr['possible_values']])
  1074. })
  1075. return Response(result, status=status.HTTP_200_OK)
  1076. def post(self, request):
  1077. """
  1078. Create a new product type or attribute with possible values.
  1079. Expected payload example:
  1080. {
  1081. "product_type": "Hardware Screws",
  1082. "attribute_name": "Material",
  1083. "is_mandatory": "Yes",
  1084. "possible_values": "Steel, Zinc Plated, Stainless Steel"
  1085. }
  1086. """
  1087. try:
  1088. product_type_name = request.data.get('product_type')
  1089. attribute_name = request.data.get('attribute_name', '')
  1090. is_mandatory = request.data.get('is_mandatory', '').lower() in ['yes', 'true', '1']
  1091. possible_values = request.data.get('possible_values', '')
  1092. if not product_type_name:
  1093. return Response({
  1094. "error": "product_type is required"
  1095. }, status=status.HTTP_400_BAD_REQUEST)
  1096. with transaction.atomic():
  1097. # Get or create product type
  1098. product_type, created = ProductType.objects.get_or_create(name=product_type_name)
  1099. if created and not attribute_name:
  1100. return Response({
  1101. "message": f"Product type '{product_type_name}' created successfully",
  1102. "data": {"product_type": product_type_name}
  1103. }, status=status.HTTP_201_CREATED)
  1104. if attribute_name:
  1105. # Create attribute
  1106. attribute, attr_created = ProductAttribute.objects.get_or_create(
  1107. product_type=product_type,
  1108. name=attribute_name,
  1109. defaults={'is_mandatory': is_mandatory}
  1110. )
  1111. if not attr_created:
  1112. return Response({
  1113. "error": f"Attribute '{attribute_name}' already exists for product type '{product_type_name}'"
  1114. }, status=status.HTTP_400_BAD_REQUEST)
  1115. # Handle possible values
  1116. if possible_values:
  1117. for val in [v.strip() for v in possible_values.split(',') if v.strip()]:
  1118. AttributePossibleValue.objects.create(attribute=attribute, value=val)
  1119. return Response({
  1120. "message": "Attribute created successfully",
  1121. "data": {
  1122. "product_type": product_type_name,
  1123. "attribute_name": attribute_name,
  1124. "is_mandatory": "Yes" if is_mandatory else "No",
  1125. "possible_values": possible_values
  1126. }
  1127. }, status=status.HTTP_201_CREATED)
  1128. return Response({
  1129. "message": f"Product type '{product_type_name}' already exists",
  1130. "data": {"product_type": product_type_name}
  1131. }, status=status.HTTP_200_OK)
  1132. except Exception as e:
  1133. return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
  1134. def put(self, request):
  1135. """
  1136. Update an existing product type attribute and its possible values.
  1137. Expected payload example:
  1138. {
  1139. "product_type": "Hardware Screws",
  1140. "attribute_name": "Material",
  1141. "is_mandatory": "Yes",
  1142. "possible_values": "Steel, Zinc Plated, Stainless Steel, Brass"
  1143. }
  1144. """
  1145. try:
  1146. product_type_name = request.data.get('product_type')
  1147. attribute_name = request.data.get('attribute_name')
  1148. is_mandatory = request.data.get('is_mandatory', '').lower() in ['yes', 'true', '1']
  1149. possible_values = request.data.get('possible_values', '')
  1150. if not all([product_type_name, attribute_name]):
  1151. return Response({
  1152. "error": "product_type and attribute_name are required"
  1153. }, status=status.HTTP_400_BAD_REQUEST)
  1154. with transaction.atomic():
  1155. try:
  1156. product_type = ProductType.objects.get(name=product_type_name)
  1157. attribute = ProductAttribute.objects.get(
  1158. product_type=product_type,
  1159. name=attribute_name
  1160. )
  1161. except ProductType.DoesNotExist:
  1162. return Response({
  1163. "error": f"Product type '{product_type_name}' not found"
  1164. }, status=status.HTTP_404_NOT_FOUND)
  1165. except ProductAttribute.DoesNotExist:
  1166. return Response({
  1167. "error": f"Attribute '{attribute_name}' not found for product type '{product_type_name}'"
  1168. }, status=status.HTTP_404_NOT_FOUND)
  1169. # Update attribute
  1170. attribute.is_mandatory = is_mandatory
  1171. attribute.save()
  1172. # Update possible values
  1173. AttributePossibleValue.objects.filter(attribute=attribute).delete()
  1174. if possible_values:
  1175. for val in [v.strip() for v in possible_values.split(',') if v.strip()]:
  1176. AttributePossibleValue.objects.create(attribute=attribute, value=val)
  1177. return Response({
  1178. "message": "Attribute updated successfully",
  1179. "data": {
  1180. "product_type": product_type_name,
  1181. "attribute_name": attribute_name,
  1182. "is_mandatory": "Yes" if is_mandatory else "No",
  1183. "possible_values": possible_values
  1184. }
  1185. }, status=status.HTTP_200_OK)
  1186. except Exception as e:
  1187. return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
  1188. def delete(self, request):
  1189. """
  1190. Delete a product type or a specific attribute.
  1191. Expected payload example:
  1192. {
  1193. "product_type": "Hardware Screws",
  1194. "attribute_name": "Material"
  1195. }
  1196. """
  1197. try:
  1198. product_type_name = request.data.get('product_type')
  1199. attribute_name = request.data.get('attribute_name', '')
  1200. if not product_type_name:
  1201. return Response({
  1202. "error": "product_type is required"
  1203. }, status=status.HTTP_400_BAD_REQUEST)
  1204. with transaction.atomic():
  1205. try:
  1206. product_type = ProductType.objects.get(name=product_type_name)
  1207. except ProductType.DoesNotExist:
  1208. return Response({
  1209. "error": f"Product type '{product_type_name}' not found"
  1210. }, status=status.HTTP_404_NOT_FOUND)
  1211. if attribute_name:
  1212. # Delete specific attribute
  1213. try:
  1214. attribute = ProductAttribute.objects.get(
  1215. product_type=product_type,
  1216. name=attribute_name
  1217. )
  1218. attribute.delete()
  1219. return Response({
  1220. "message": f"Attribute '{attribute_name}' deleted successfully from product type '{product_type_name}'"
  1221. }, status=status.HTTP_200_OK)
  1222. except ProductAttribute.DoesNotExist:
  1223. return Response({
  1224. "error": f"Attribute '{attribute_name}' not found for product type '{product_type_name}'"
  1225. }, status=status.HTTP_404_NOT_FOUND)
  1226. else:
  1227. # Delete entire product type
  1228. product_type.delete()
  1229. return Response({
  1230. "message": f"Product type '{product_type_name}' and all its attributes deleted successfully"
  1231. }, status=status.HTTP_200_OK)
  1232. except Exception as e:
  1233. return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
  1234. class ProductTypeListView(APIView):
  1235. """
  1236. GET API to list all product types (only names).
  1237. """
  1238. def get(self, request):
  1239. product_types = ProductType.objects.values_list('name', flat=True)
  1240. return Response({"product_types": list(product_types)}, status=status.HTTP_200_OK)
  1241. class ProductAttributeValueView(APIView):
  1242. """
  1243. API to manage manually entered original attribute values.
  1244. GET: Retrieve all attribute values for a product
  1245. POST: Create or update attribute values for a product
  1246. DELETE: Delete attribute values
  1247. """
  1248. def get(self, request):
  1249. """
  1250. Get original attribute values for a specific product or all products.
  1251. Query params: item_id (optional)
  1252. """
  1253. item_id = request.query_params.get('item_id')
  1254. if item_id:
  1255. try:
  1256. product = Product.objects.get(item_id=item_id)
  1257. values = ProductAttributeValue.objects.filter(product=product)
  1258. serializer = ProductAttributeValueSerializer(values, many=True)
  1259. return Response({
  1260. "item_id": item_id,
  1261. "attributes": serializer.data
  1262. }, status=status.HTTP_200_OK)
  1263. except Product.DoesNotExist:
  1264. return Response({
  1265. "error": f"Product with item_id '{item_id}' not found"
  1266. }, status=status.HTTP_404_NOT_FOUND)
  1267. else:
  1268. # Return all attribute values grouped by product
  1269. values = ProductAttributeValue.objects.all().select_related('product')
  1270. serializer = ProductAttributeValueSerializer(values, many=True)
  1271. return Response(serializer.data, status=status.HTTP_200_OK)
  1272. def post(self, request):
  1273. """
  1274. Create or update original attribute value for a product.
  1275. Expected payload:
  1276. {
  1277. "item_id": "3217373735",
  1278. "attribute_name": "Clothing Neck Style",
  1279. "original_value": "V-Neck Square"
  1280. }
  1281. """
  1282. serializer = ProductAttributeValueInputSerializer(data=request.data)
  1283. if not serializer.is_valid():
  1284. return Response({"error": serializer.errors}, status=status.HTTP_400_BAD_REQUEST)
  1285. validated_data = serializer.validated_data
  1286. item_id = validated_data['item_id']
  1287. attribute_name = validated_data['attribute_name']
  1288. original_value = validated_data['original_value']
  1289. try:
  1290. product = Product.objects.get(item_id=item_id)
  1291. except Product.DoesNotExist:
  1292. return Response({
  1293. "error": f"Product with item_id '{item_id}' not found"
  1294. }, status=status.HTTP_404_NOT_FOUND)
  1295. # Create or update the attribute value
  1296. attr_value, created = ProductAttributeValue.objects.update_or_create(
  1297. product=product,
  1298. attribute_name=attribute_name,
  1299. defaults={'original_value': original_value}
  1300. )
  1301. response_serializer = ProductAttributeValueSerializer(attr_value)
  1302. return Response({
  1303. "message": "Attribute value created" if created else "Attribute value updated",
  1304. "data": response_serializer.data
  1305. }, status=status.HTTP_201_CREATED if created else status.HTTP_200_OK)
  1306. def delete(self, request):
  1307. """
  1308. Delete original attribute value(s).
  1309. Expected payload:
  1310. {
  1311. "item_id": "3217373735",
  1312. "attribute_name": "Clothing Neck Style" # Optional, if not provided deletes all for product
  1313. }
  1314. """
  1315. item_id = request.data.get('item_id')
  1316. attribute_name = request.data.get('attribute_name')
  1317. if not item_id:
  1318. return Response({
  1319. "error": "item_id is required"
  1320. }, status=status.HTTP_400_BAD_REQUEST)
  1321. try:
  1322. product = Product.objects.get(item_id=item_id)
  1323. except Product.DoesNotExist:
  1324. return Response({
  1325. "error": f"Product with item_id '{item_id}' not found"
  1326. }, status=status.HTTP_404_NOT_FOUND)
  1327. if attribute_name:
  1328. # Delete specific attribute
  1329. deleted_count, _ = ProductAttributeValue.objects.filter(
  1330. product=product,
  1331. attribute_name=attribute_name
  1332. ).delete()
  1333. if deleted_count == 0:
  1334. return Response({
  1335. "error": f"Attribute '{attribute_name}' not found for product '{item_id}'"
  1336. }, status=status.HTTP_404_NOT_FOUND)
  1337. return Response({
  1338. "message": f"Attribute '{attribute_name}' deleted successfully"
  1339. }, status=status.HTTP_200_OK)
  1340. else:
  1341. # Delete all attributes for product
  1342. deleted_count, _ = ProductAttributeValue.objects.filter(product=product).delete()
  1343. return Response({
  1344. "message": f"Deleted {deleted_count} attribute(s) for product '{item_id}'"
  1345. }, status=status.HTTP_200_OK)
  1346. class BulkProductAttributeValueView(APIView):
  1347. """
  1348. API for bulk operations on original attribute values.
  1349. POST: Create/update multiple attribute values at once
  1350. """
  1351. def post(self, request):
  1352. """
  1353. Bulk create or update attribute values for multiple products.
  1354. Expected payload:
  1355. {
  1356. "products": [
  1357. {
  1358. "item_id": "3217373735",
  1359. "attributes": {
  1360. "Clothing Neck Style": "V-Neck Square",
  1361. "Condition": "New with tags"
  1362. }
  1363. },
  1364. {
  1365. "item_id": "1234567890",
  1366. "attributes": {
  1367. "Material": "Cotton",
  1368. "Size": "L"
  1369. }
  1370. }
  1371. ]
  1372. }
  1373. """
  1374. products_data = request.data.get('products', [])
  1375. if not products_data:
  1376. return Response({
  1377. "error": "products list is required"
  1378. }, status=status.HTTP_400_BAD_REQUEST)
  1379. results = []
  1380. successful = 0
  1381. failed = 0
  1382. with transaction.atomic():
  1383. for product_data in products_data:
  1384. serializer = BulkProductAttributeValueSerializer(data=product_data)
  1385. if not serializer.is_valid():
  1386. failed += 1
  1387. results.append({
  1388. "item_id": product_data.get('item_id'),
  1389. "status": "failed",
  1390. "error": serializer.errors
  1391. })
  1392. continue
  1393. validated_data = serializer.validated_data
  1394. item_id = validated_data['item_id']
  1395. attributes = validated_data['attributes']
  1396. try:
  1397. product = Product.objects.get(item_id=item_id)
  1398. created_count = 0
  1399. updated_count = 0
  1400. for attr_name, original_value in attributes.items():
  1401. _, created = ProductAttributeValue.objects.update_or_create(
  1402. product=product,
  1403. attribute_name=attr_name,
  1404. defaults={'original_value': original_value}
  1405. )
  1406. if created:
  1407. created_count += 1
  1408. else:
  1409. updated_count += 1
  1410. successful += 1
  1411. results.append({
  1412. "item_id": item_id,
  1413. "status": "success",
  1414. "created": created_count,
  1415. "updated": updated_count
  1416. })
  1417. except Product.DoesNotExist:
  1418. failed += 1
  1419. results.append({
  1420. "item_id": item_id,
  1421. "status": "failed",
  1422. "error": f"Product not found"
  1423. })
  1424. return Response({
  1425. "results": results,
  1426. "total_products": len(products_data),
  1427. "successful": successful,
  1428. "failed": failed
  1429. }, status=status.HTTP_200_OK)
  1430. class ProductListWithAttributesView(APIView):
  1431. """
  1432. GET API to list all products with their original attribute values.
  1433. """
  1434. def get(self, request):
  1435. item_id = request.query_params.get('item_id')
  1436. if item_id:
  1437. try:
  1438. product = Product.objects.get(item_id=item_id)
  1439. serializer = ProductWithAttributesSerializer(product)
  1440. return Response(serializer.data, status=status.HTTP_200_OK)
  1441. except Product.DoesNotExist:
  1442. return Response({
  1443. "error": f"Product with item_id '{item_id}' not found"
  1444. }, status=status.HTTP_404_NOT_FOUND)
  1445. else:
  1446. products = Product.objects.all()
  1447. serializer = ProductWithAttributesSerializer(products, many=True)
  1448. return Response(serializer.data, status=status.HTTP_200_OK)
  1449. class CacheManagementView(APIView):
  1450. """
  1451. API endpoint to manage caching system.
  1452. GET: Get current cache statistics and configuration
  1453. POST: Enable/disable caching or clear caches
  1454. """
  1455. def get(self, request):
  1456. """
  1457. Get current cache configuration and statistics.
  1458. """
  1459. config = cache_config.get_cache_config()
  1460. stats = ProductAttributeService.get_cache_stats()
  1461. return Response({
  1462. "configuration": config,
  1463. "statistics": stats,
  1464. "message": "Cache status retrieved successfully"
  1465. }, status=status.HTTP_200_OK)
  1466. def post(self, request):
  1467. """
  1468. Manage cache settings.
  1469. Expected payload examples:
  1470. 1. Enable/disable caching:
  1471. {
  1472. "action": "toggle",
  1473. "enable": true // or false
  1474. }
  1475. 2. Clear all caches:
  1476. {
  1477. "action": "clear"
  1478. }
  1479. 3. Clear specific cache:
  1480. {
  1481. "action": "clear",
  1482. "cache_type": "embedding" // or "attribute" or "clip"
  1483. }
  1484. 4. Get statistics:
  1485. {
  1486. "action": "stats"
  1487. }
  1488. """
  1489. action = request.data.get('action')
  1490. if not action:
  1491. return Response({
  1492. "error": "action is required",
  1493. "valid_actions": ["toggle", "clear", "stats"]
  1494. }, status=status.HTTP_400_BAD_REQUEST)
  1495. # Toggle caching on/off
  1496. if action == "toggle":
  1497. enable = request.data.get('enable')
  1498. if enable is None:
  1499. return Response({
  1500. "error": "enable parameter is required (true/false)"
  1501. }, status=status.HTTP_400_BAD_REQUEST)
  1502. # Update the cache configuration
  1503. cache_config.ENABLE_CACHING = bool(enable)
  1504. cache_config.ENABLE_ATTRIBUTE_EXTRACTION_CACHE = bool(enable)
  1505. cache_config.ENABLE_EMBEDDING_CACHE = bool(enable)
  1506. cache_config.ENABLE_CLIP_MODEL_CACHE = bool(enable)
  1507. status_msg = "enabled" if enable else "disabled"
  1508. return Response({
  1509. "message": f"Caching has been {status_msg}",
  1510. "configuration": cache_config.get_cache_config()
  1511. }, status=status.HTTP_200_OK)
  1512. # Clear caches
  1513. elif action == "clear":
  1514. cache_type = request.data.get('cache_type', 'all')
  1515. if cache_type == 'all':
  1516. ProductAttributeService.clear_all_caches()
  1517. VisualProcessingService.clear_clip_cache()
  1518. message = "All caches cleared successfully"
  1519. elif cache_type == 'embedding':
  1520. from .services import EmbeddingCache
  1521. EmbeddingCache.clear()
  1522. message = "Embedding cache cleared successfully"
  1523. elif cache_type == 'attribute':
  1524. from .services import SimpleCache
  1525. SimpleCache.clear()
  1526. message = "Attribute extraction cache cleared successfully"
  1527. elif cache_type == 'clip':
  1528. VisualProcessingService.clear_clip_cache()
  1529. message = "CLIP model cache cleared successfully"
  1530. else:
  1531. return Response({
  1532. "error": f"Invalid cache_type: {cache_type}",
  1533. "valid_types": ["all", "embedding", "attribute", "clip"]
  1534. }, status=status.HTTP_400_BAD_REQUEST)
  1535. return Response({
  1536. "message": message,
  1537. "statistics": ProductAttributeService.get_cache_stats()
  1538. }, status=status.HTTP_200_OK)
  1539. # Get statistics
  1540. elif action == "stats":
  1541. stats = ProductAttributeService.get_cache_stats()
  1542. config = cache_config.get_cache_config()
  1543. return Response({
  1544. "configuration": config,
  1545. "statistics": stats
  1546. }, status=status.HTTP_200_OK)
  1547. else:
  1548. return Response({
  1549. "error": f"Invalid action: {action}",
  1550. "valid_actions": ["toggle", "clear", "stats"]
  1551. }, status=status.HTTP_400_BAD_REQUEST)
  1552. class CacheStatsView(APIView):
  1553. """
  1554. Simple GET endpoint to retrieve cache statistics.
  1555. """
  1556. def get(self, request):
  1557. """Get current cache statistics."""
  1558. stats = ProductAttributeService.get_cache_stats()
  1559. config = cache_config.get_cache_config()
  1560. return Response({
  1561. "cache_enabled": config["master_cache_enabled"],
  1562. "statistics": stats,
  1563. "timestamp": datetime.now().isoformat()
  1564. }, status=status.HTTP_200_OK)