views.py 102 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
722782279228022812282228322842285228622872288228922902291229222932294229522962297229822992300230123022303230423052306230723082309231023112312231323142315231623172318231923202321232223232324232523262327232823292330233123322333233423352336233723382339234023412342234323442345234623472348234923502351235223532354235523562357235823592360236123622363236423652366236723682369237023712372237323742375237623772378237923802381238223832384238523862387
  1. # ==================== Cleaned & Optimized Imports ====================
  2. import os
  3. import io
  4. import json
  5. import logging
  6. import threading
  7. from datetime import datetime
  8. import pandas as pd
  9. import concurrent.futures
  10. from django.conf import settings
  11. from django.http import HttpResponse
  12. from django.db import transaction
  13. from django.db.models import Prefetch
  14. from rest_framework.views import APIView
  15. from rest_framework.response import Response
  16. from rest_framework import status
  17. from rest_framework.parsers import MultiPartParser, FormParser
  18. from openpyxl import Workbook
  19. from openpyxl.styles import Font, PatternFill, Alignment
  20. from rest_framework.views import APIView
  21. from . import cache_config
  22. # --- Local imports ---
  23. from .models import (
  24. Product,
  25. ProductType,
  26. ProductAttribute,
  27. ProductAttributeValue,
  28. AttributePossibleValue,
  29. )
  30. from .serializers import (
  31. ProductSerializer,
  32. ProductTypeSerializer,
  33. ProductAttributeSerializer,
  34. AttributePossibleValueSerializer,
  35. SingleProductRequestSerializer,
  36. BatchProductRequestSerializer,
  37. ProductAttributeResultSerializer,
  38. BatchProductResponseSerializer,
  39. ProductAttributeValueSerializer,
  40. ProductAttributeValueInputSerializer,
  41. BulkProductAttributeValueSerializer,
  42. ProductWithAttributesSerializer,
  43. )
  44. from .services import ProductAttributeService
  45. from .ocr_service import OCRService
  46. from .visual_processing_service import VisualProcessingService
  47. # --- Configuration for Generated Outputs Folder ---
  48. OUTPUT_FOLDER_NAME = 'generated_outputs'
  49. OUTPUT_ROOT = os.path.join(settings.MEDIA_ROOT, OUTPUT_FOLDER_NAME)
  50. OUTPUT_URL = os.path.join(settings.MEDIA_URL, OUTPUT_FOLDER_NAME).replace('\\', '/') # Ensure forward slashes
  51. LOG_FILE_NAME = 'excel_generation.log'
  52. STATUS_FILE_NAME = 'excel_generation_status.json'
  53. EXCEL_FILE_NAME = 'generated_products.xlsx'
  54. LOG_FILE_PATH = os.path.join(OUTPUT_ROOT, LOG_FILE_NAME)
  55. STATUS_FILE_PATH = os.path.join(OUTPUT_ROOT, STATUS_FILE_NAME)
  56. EXCEL_FILE_PATH = os.path.join(OUTPUT_ROOT, EXCEL_FILE_NAME)
  57. # Ensure the output folder exists
  58. if not os.path.exists(OUTPUT_ROOT):
  59. os.makedirs(OUTPUT_ROOT)
  60. # Configure logging
  61. logging.basicConfig(
  62. filename=LOG_FILE_PATH,
  63. level=logging.INFO,
  64. format='%(asctime)s - %(levelname)s - %(message)s'
  65. )
  66. logger = logging.getLogger(__name__)
  67. class ExtractProductAttributesView(APIView):
  68. """
  69. API endpoint to extract product attributes for a single product by item_id.
  70. Fetches product details from database with source tracking.
  71. Returns attributes in array format: [{"value": "...", "source": "..."}]
  72. Includes OCR and Visual Processing results.
  73. """
  74. def post(self, request):
  75. serializer = SingleProductRequestSerializer(data=request.data)
  76. if not serializer.is_valid():
  77. return Response({"error": serializer.errors}, status=status.HTTP_400_BAD_REQUEST)
  78. validated_data = serializer.validated_data
  79. item_id = validated_data.get("item_id")
  80. # Fetch product from DB
  81. try:
  82. product = Product.objects.get(item_id=item_id)
  83. except Product.DoesNotExist:
  84. return Response(
  85. {"error": f"Product with item_id '{item_id}' not found."},
  86. status=status.HTTP_404_NOT_FOUND
  87. )
  88. # Extract product details
  89. title = product.product_name
  90. short_desc = product.product_short_description
  91. long_desc = product.product_long_description
  92. image_url = product.image_path
  93. # Process image for OCR if required
  94. ocr_results = None
  95. ocr_text = None
  96. visual_results = None
  97. if validated_data.get("process_image", True) and image_url:
  98. # OCR Processing
  99. ocr_service = OCRService()
  100. ocr_results = ocr_service.process_image(image_url)
  101. if ocr_results and ocr_results.get("detected_text"):
  102. ocr_attrs = ProductAttributeService.extract_attributes_from_ocr(
  103. ocr_results, validated_data.get("model")
  104. )
  105. ocr_results["extracted_attributes"] = ocr_attrs
  106. ocr_text = "\n".join([
  107. f"{item['text']} (confidence: {item['confidence']:.2f})"
  108. for item in ocr_results["detected_text"]
  109. ])
  110. # Visual Processing
  111. visual_service = VisualProcessingService()
  112. product_type_hint = product.product_type if hasattr(product, 'product_type') else None
  113. visual_results = visual_service.process_image(image_url, product_type_hint)
  114. # Combine all product text with source tracking
  115. product_text, source_map = ProductAttributeService.combine_product_text(
  116. title=title,
  117. short_desc=short_desc,
  118. long_desc=long_desc,
  119. ocr_text=ocr_text
  120. )
  121. # Extract attributes with enhanced features and source tracking
  122. result = ProductAttributeService.extract_attributes(
  123. product_text=product_text,
  124. mandatory_attrs=validated_data["mandatory_attrs"],
  125. source_map=source_map,
  126. model=validated_data.get("model"),
  127. extract_additional=validated_data.get("extract_additional", True),
  128. multiple=validated_data.get("multiple", []),
  129. threshold_abs=validated_data.get("threshold_abs", 0.65),
  130. margin=validated_data.get("margin", 0.15),
  131. use_dynamic_thresholds=validated_data.get("use_dynamic_thresholds", True),
  132. use_adaptive_margin=validated_data.get("use_adaptive_margin", True),
  133. use_semantic_clustering=validated_data.get("use_semantic_clustering", True)
  134. )
  135. # Attach OCR results if available
  136. if ocr_results:
  137. result["ocr_results"] = ocr_results
  138. # Attach Visual Processing results if available
  139. if visual_results:
  140. result["visual_results"] = visual_results
  141. response_serializer = ProductAttributeResultSerializer(data=result)
  142. if response_serializer.is_valid():
  143. return Response(response_serializer.data, status=status.HTTP_200_OK)
  144. return Response(result, status=status.HTTP_200_OK)
  145. # ==================== OPTIMIZED BATCH VIEW ====================
  146. # class BatchExtractProductAttributesView(APIView):
  147. # """
  148. # ⚡ PERFORMANCE OPTIMIZED: Batch extraction with intelligent parallelization
  149. # Expected performance: 10 products in 30-60 seconds (with image processing)
  150. # """
  151. # def post(self, request):
  152. # import time
  153. # start_time = time.time()
  154. # serializer = BatchProductRequestSerializer(data=request.data)
  155. # if not serializer.is_valid():
  156. # return Response({"error": serializer.errors}, status=status.HTTP_400_BAD_REQUEST)
  157. # validated_data = serializer.validated_data
  158. # product_list = validated_data.get("products", [])
  159. # logger.info(f"🚀 Starting batch processing for {len(product_list)} products")
  160. # # ==================== OPTIMIZATION 1: Bulk DB Query ====================
  161. # item_ids = [p['item_id'] for p in product_list]
  162. # products_queryset = Product.objects.filter(
  163. # item_id__in=item_ids
  164. # ).prefetch_related('attribute_values')
  165. # product_map = {product.item_id: product for product in products_queryset}
  166. # # Prefetch ALL original attribute values in ONE query
  167. # original_values_qs = ProductAttributeValue.objects.filter(
  168. # product__item_id__in=item_ids
  169. # ).select_related('product')
  170. # original_values_map = {}
  171. # for attr_val in original_values_qs:
  172. # item_id = attr_val.product.item_id
  173. # if item_id not in original_values_map:
  174. # original_values_map[item_id] = {}
  175. # original_values_map[item_id][attr_val.attribute_name] = attr_val.original_value
  176. # logger.info(f"✓ Loaded {len(product_map)} products from database")
  177. # # Extract settings
  178. # model = validated_data.get("model")
  179. # extract_additional = validated_data.get("extract_additional", True)
  180. # process_image = validated_data.get("process_image", True)
  181. # multiple = validated_data.get("multiple", [])
  182. # threshold_abs = validated_data.get("threshold_abs", 0.65)
  183. # margin = validated_data.get("margin", 0.15)
  184. # use_dynamic_thresholds = validated_data.get("use_dynamic_thresholds", False)
  185. # use_adaptive_margin = validated_data.get("use_adaptive_margin", False)
  186. # use_semantic_clustering = validated_data.get("use_semantic_clustering", False)
  187. # results = []
  188. # successful = 0
  189. # failed = 0
  190. # # ==================== OPTIMIZATION 2: Conditional Service Init ====================
  191. # # Only initialize if processing images
  192. # ocr_service = None
  193. # visual_service = None
  194. # if process_image:
  195. # from .ocr_service import OCRService
  196. # from .visual_processing_service import VisualProcessingService
  197. # ocr_service = OCRService()
  198. # visual_service = VisualProcessingService()
  199. # logger.info("✓ Image processing services initialized")
  200. # # ==================== OPTIMIZATION 3: Smart Parallelization ====================
  201. # def process_single_product(product_entry):
  202. # """Process a single product (runs in parallel)"""
  203. # import time
  204. # product_start = time.time()
  205. # item_id = product_entry['item_id']
  206. # mandatory_attrs = product_entry['mandatory_attrs']
  207. # if item_id not in product_map:
  208. # return {
  209. # "product_id": item_id,
  210. # "error": "Product not found in database"
  211. # }, False
  212. # product = product_map[item_id]
  213. # try:
  214. # title = product.product_name
  215. # short_desc = product.product_short_description
  216. # long_desc = product.product_long_description
  217. # image_url = product.image_path
  218. # ocr_results = None
  219. # ocr_text = None
  220. # visual_results = None
  221. # # ⚡ SKIP IMAGE PROCESSING IF DISABLED (HUGE TIME SAVER)
  222. # if process_image and image_url:
  223. # if ocr_service:
  224. # ocr_results = ocr_service.process_image(image_url)
  225. # if ocr_results and ocr_results.get("detected_text"):
  226. # ocr_attrs = ProductAttributeService.extract_attributes_from_ocr(
  227. # ocr_results, model
  228. # )
  229. # ocr_results["extracted_attributes"] = ocr_attrs
  230. # ocr_text = "\n".join([
  231. # f"{item['text']} (confidence: {item['confidence']:.2f})"
  232. # for item in ocr_results["detected_text"]
  233. # ])
  234. # if visual_service:
  235. # product_type_hint = product.product_type if hasattr(product, 'product_type') else None
  236. # visual_results = visual_service.process_image(image_url, product_type_hint)
  237. # if visual_results and visual_results.get('visual_attributes'):
  238. # visual_results['visual_attributes'] = ProductAttributeService.format_visual_attributes(
  239. # visual_results['visual_attributes']
  240. # )
  241. # # Combine product text with source tracking
  242. # product_text, source_map = ProductAttributeService.combine_product_text(
  243. # title=title,
  244. # short_desc=short_desc,
  245. # long_desc=long_desc,
  246. # ocr_text=ocr_text
  247. # )
  248. # # ⚡ EXTRACT ATTRIBUTES WITH CACHING ENABLED
  249. # extracted = ProductAttributeService.extract_attributes(
  250. # product_text=product_text,
  251. # mandatory_attrs=mandatory_attrs,
  252. # source_map=source_map,
  253. # model=model,
  254. # extract_additional=extract_additional,
  255. # multiple=multiple,
  256. # # threshold_abs=threshold_abs,
  257. # # margin=margin,
  258. # # use_dynamic_thresholds=use_dynamic_thresholds,
  259. # # use_adaptive_margin=use_adaptive_margin,
  260. # # use_semantic_clustering=use_semantic_clustering,
  261. # use_cache=True # ⚡ CRITICAL: Enable caching
  262. # )
  263. # # Add original values
  264. # original_attrs = original_values_map.get(item_id, {})
  265. # for attr_name, attr_values in extracted.get("mandatory", {}).items():
  266. # if isinstance(attr_values, list):
  267. # for attr_obj in attr_values:
  268. # if isinstance(attr_obj, dict):
  269. # attr_obj["original_value"] = original_attrs.get(attr_name, "")
  270. # for attr_name, attr_values in extracted.get("additional", {}).items():
  271. # if isinstance(attr_values, list):
  272. # for attr_obj in attr_values:
  273. # if isinstance(attr_obj, dict):
  274. # attr_obj["original_value"] = original_attrs.get(attr_name, "")
  275. # result = {
  276. # "product_id": product.item_id,
  277. # "mandatory": extracted.get("mandatory", {}),
  278. # "additional": extracted.get("additional", {}),
  279. # }
  280. # if ocr_results:
  281. # result["ocr_results"] = ocr_results
  282. # if visual_results:
  283. # result["visual_results"] = visual_results
  284. # processing_time = time.time() - product_start
  285. # logger.info(f"✓ Processed {item_id} in {processing_time:.2f}s")
  286. # return result, True
  287. # except Exception as e:
  288. # logger.error(f"❌ Error processing {item_id}: {str(e)}")
  289. # return {
  290. # "product_id": item_id,
  291. # "error": str(e)
  292. # }, False
  293. # # ==================== OPTIMIZATION 4: Parallel Execution ====================
  294. # # Adjust workers based on whether image processing is enabled
  295. # max_workers = min(3 if process_image else 10, len(product_list))
  296. # logger.info(f"⚡ Using {max_workers} parallel workers")
  297. # with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
  298. # # Submit all tasks
  299. # future_to_product = {
  300. # executor.submit(process_single_product, product): product
  301. # for product in product_list
  302. # }
  303. # # Collect results as they complete
  304. # for future in concurrent.futures.as_completed(future_to_product):
  305. # try:
  306. # result, success = future.result()
  307. # results.append(result)
  308. # if success:
  309. # successful += 1
  310. # else:
  311. # failed += 1
  312. # except Exception as e:
  313. # failed += 1
  314. # logger.error(f"❌ Future execution error: {str(e)}")
  315. # results.append({
  316. # "product_id": "unknown",
  317. # "error": str(e)
  318. # })
  319. # total_time = time.time() - start_time
  320. # # Get cache statistics
  321. # cache_stats = ProductAttributeService.get_cache_stats()
  322. # logger.info(f"""
  323. # 🎉 BATCH PROCESSING COMPLETE
  324. # ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
  325. # Total products: {len(product_list)}
  326. # Successful: {successful}
  327. # Failed: {failed}
  328. # Total time: {total_time:.2f}s
  329. # Avg time/product: {total_time/len(product_list):.2f}s
  330. # ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
  331. # """)
  332. # batch_result = {
  333. # "results": results,
  334. # "total_products": len(product_list),
  335. # "successful": successful,
  336. # "failed": failed,
  337. # "performance": {
  338. # "total_time_seconds": round(total_time, 2),
  339. # "avg_time_per_product": round(total_time / len(product_list), 2),
  340. # "workers_used": max_workers
  341. # },
  342. # "cache_stats": cache_stats
  343. # }
  344. # response_serializer = BatchProductResponseSerializer(data=batch_result)
  345. # if response_serializer.is_valid():
  346. # return Response(response_serializer.data, status=status.HTTP_200_OK)
  347. # return Response(batch_result, status=status.HTTP_200_OK)
  348. # VERSION WITH PARALLELIZATION
  349. class BatchExtractProductAttributesView(APIView):
  350. """
  351. ⚡ PERFORMANCE OPTIMIZED: Batch extraction with intelligent parallelization
  352. Expected performance: 10 products in 30-60 seconds (with image processing)
  353. NOW WITH USER VALUE REASONING
  354. """
  355. def post(self, request):
  356. import time
  357. start_time = time.time()
  358. serializer = BatchProductRequestSerializer(data=request.data)
  359. if not serializer.is_valid():
  360. return Response({"error": serializer.errors}, status=status.HTTP_400_BAD_REQUEST)
  361. validated_data = serializer.validated_data
  362. product_list = validated_data.get("products", [])
  363. logger.info(f"🚀 Starting batch processing for {len(product_list)} products")
  364. # ==================== OPTIMIZATION 1: Bulk DB Query ====================
  365. item_ids = [p['item_id'] for p in product_list]
  366. products_queryset = Product.objects.filter(
  367. item_id__in=item_ids
  368. ).prefetch_related('attribute_values')
  369. product_map = {product.item_id: product for product in products_queryset}
  370. # Prefetch ALL original attribute values in ONE query
  371. original_values_qs = ProductAttributeValue.objects.filter(
  372. product__item_id__in=item_ids
  373. ).select_related('product')
  374. original_values_map = {}
  375. for attr_val in original_values_qs:
  376. item_id = attr_val.product.item_id
  377. if item_id not in original_values_map:
  378. original_values_map[item_id] = {}
  379. original_values_map[item_id][attr_val.attribute_name] = attr_val.original_value
  380. logger.info(f"✓ Loaded {len(product_map)} products from database")
  381. logger.info(f"✓ Loaded user values for {len(original_values_map)} products")
  382. # Extract settings
  383. model = validated_data.get("model")
  384. extract_additional = validated_data.get("extract_additional", True)
  385. process_image = validated_data.get("process_image", True)
  386. multiple = validated_data.get("multiple", [])
  387. threshold_abs = validated_data.get("threshold_abs", 0.65)
  388. margin = validated_data.get("margin", 0.15)
  389. use_dynamic_thresholds = validated_data.get("use_dynamic_thresholds", False)
  390. use_adaptive_margin = validated_data.get("use_adaptive_margin", False)
  391. use_semantic_clustering = validated_data.get("use_semantic_clustering", False)
  392. results = []
  393. successful = 0
  394. failed = 0
  395. # ==================== OPTIMIZATION 2: Conditional Service Init ====================
  396. ocr_service = None
  397. visual_service = None
  398. if process_image:
  399. from .ocr_service import OCRService
  400. from .visual_processing_service import VisualProcessingService
  401. ocr_service = OCRService()
  402. visual_service = VisualProcessingService()
  403. logger.info("✓ Image processing services initialized")
  404. # ==================== OPTIMIZATION 3: Smart Parallelization ====================
  405. def process_single_product(product_entry):
  406. """Process a single product (runs in parallel)"""
  407. import time
  408. product_start = time.time()
  409. item_id = product_entry['item_id']
  410. mandatory_attrs = product_entry['mandatory_attrs']
  411. if item_id not in product_map:
  412. return {
  413. "product_id": item_id,
  414. "error": "Product not found in database"
  415. }, False
  416. product = product_map[item_id]
  417. try:
  418. title = product.product_name
  419. short_desc = product.product_short_description
  420. long_desc = product.product_long_description
  421. image_url = product.image_path
  422. ocr_results = None
  423. ocr_text = None
  424. visual_results = None
  425. # ⚡ SKIP IMAGE PROCESSING IF DISABLED
  426. if process_image and image_url:
  427. if ocr_service:
  428. ocr_results = ocr_service.process_image(image_url)
  429. if ocr_results and ocr_results.get("detected_text"):
  430. ocr_attrs = ProductAttributeService.extract_attributes_from_ocr(
  431. ocr_results, model
  432. )
  433. ocr_results["extracted_attributes"] = ocr_attrs
  434. ocr_text = "\n".join([
  435. f"{item['text']} (confidence: {item['confidence']:.2f})"
  436. for item in ocr_results["detected_text"]
  437. ])
  438. if visual_service:
  439. product_type_hint = product.product_type if hasattr(product, 'product_type') else None
  440. visual_results = visual_service.process_image(image_url, product_type_hint)
  441. if visual_results and visual_results.get('visual_attributes'):
  442. visual_results['visual_attributes'] = ProductAttributeService.format_visual_attributes(
  443. visual_results['visual_attributes']
  444. )
  445. # Combine product text with source tracking
  446. product_text, source_map = ProductAttributeService.combine_product_text(
  447. title=title,
  448. short_desc=short_desc,
  449. long_desc=long_desc,
  450. ocr_text=ocr_text
  451. )
  452. # 🆕 GET USER-ENTERED VALUES FOR THIS PRODUCT
  453. user_entered_values = original_values_map.get(item_id, {})
  454. print("user entered values are ")
  455. print(user_entered_values)
  456. logger.info(f"Processing {item_id} with {len(user_entered_values)} user-entered values")
  457. # ⚡ EXTRACT ATTRIBUTES WITH USER VALUES AND REASONING
  458. extracted = ProductAttributeService.extract_attributes(
  459. product_text=product_text,
  460. mandatory_attrs=mandatory_attrs,
  461. source_map=source_map,
  462. model=model,
  463. extract_additional=extract_additional,
  464. multiple=multiple,
  465. use_cache=True,
  466. user_entered_values=user_entered_values # 🆕 PASS USER VALUES
  467. )
  468. # NOTE: Original values are now part of LLM response with reasoning
  469. # No need to add them separately - they're already in the "user_value" field
  470. result = {
  471. "product_id": product.item_id,
  472. "mandatory": extracted.get("mandatory", {}),
  473. "additional": extracted.get("additional", {}),
  474. }
  475. if ocr_results:
  476. result["ocr_results"] = ocr_results
  477. if visual_results:
  478. result["visual_results"] = visual_results
  479. processing_time = time.time() - product_start
  480. logger.info(f"✓ Processed {item_id} in {processing_time:.2f}s")
  481. return result, True
  482. except Exception as e:
  483. logger.error(f"❌ Error processing {item_id}: {str(e)}")
  484. return {
  485. "product_id": item_id,
  486. "error": str(e)
  487. }, False
  488. # ==================== OPTIMIZATION 4: Parallel Execution ====================
  489. max_workers = min(1 if process_image else 1, len(product_list))
  490. logger.info(f"⚡ Using {max_workers} parallel workers")
  491. with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
  492. future_to_product = {
  493. executor.submit(process_single_product, product): product
  494. for product in product_list
  495. }
  496. for future in concurrent.futures.as_completed(future_to_product):
  497. try:
  498. result, success = future.result()
  499. results.append(result)
  500. if success:
  501. successful += 1
  502. else:
  503. failed += 1
  504. except Exception as e:
  505. failed += 1
  506. logger.error(f"❌ Future execution error: {str(e)}")
  507. results.append({
  508. "product_id": "unknown",
  509. "error": str(e)
  510. })
  511. total_time = time.time() - start_time
  512. # Get cache statistics
  513. cache_stats = ProductAttributeService.get_cache_stats()
  514. logger.info(f"""
  515. 🎉 BATCH PROCESSING COMPLETE
  516. ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
  517. Total products: {len(product_list)}
  518. Successful: {successful}
  519. Failed: {failed}
  520. Total time: {total_time:.2f}s
  521. Avg time/product: {total_time/len(product_list):.2f}s
  522. ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
  523. """)
  524. batch_result = {
  525. "results": results,
  526. "total_products": len(product_list),
  527. "successful": successful,
  528. "failed": failed,
  529. "performance": {
  530. "total_time_seconds": round(total_time, 2),
  531. "avg_time_per_product": round(total_time / len(product_list), 2),
  532. "workers_used": max_workers
  533. },
  534. "cache_stats": cache_stats
  535. }
  536. response_serializer = BatchProductResponseSerializer(data=batch_result)
  537. if response_serializer.is_valid():
  538. return Response(response_serializer.data, status=status.HTTP_200_OK)
  539. return Response(batch_result, status=status.HTTP_200_OK)
  540. # # ==================== views.py (OPTIMIZED FOR 4 API KEYS) ====================
  541. # import concurrent.futures
  542. # import logging
  543. # import time
  544. # from rest_framework.views import APIView
  545. # from rest_framework.response import Response
  546. # from rest_framework import status
  547. # from .models import Product, ProductAttributeValue
  548. # from .serializers import BatchProductRequestSerializer, BatchProductResponseSerializer
  549. # from .services import ProductAttributeService
  550. # logger = logging.getLogger(__name__)
  551. # class BatchExtractProductAttributesView(APIView):
  552. # """
  553. # ⚡ PERFORMANCE OPTIMIZED: Batch extraction with 4-key load balancer
  554. # Expected performance with 4 keys: 10 products in 20-35 seconds (with image processing)
  555. # NOW WITH USER VALUE REASONING + MULTI-KEY SUPPORT
  556. # """
  557. # def post(self, request):
  558. # start_time = time.time()
  559. # serializer = BatchProductRequestSerializer(data=request.data)
  560. # if not serializer.is_valid():
  561. # return Response({"error": serializer.errors}, status=status.HTTP_400_BAD_REQUEST)
  562. # validated_data = serializer.validated_data
  563. # product_list = validated_data.get("products", [])
  564. # logger.info(f"🚀 Starting batch processing for {len(product_list)} products")
  565. # # ==================== OPTIMIZATION 1: Bulk DB Query ====================
  566. # item_ids = [p['item_id'] for p in product_list]
  567. # products_queryset = Product.objects.filter(
  568. # item_id__in=item_ids
  569. # ).prefetch_related('attribute_values')
  570. # product_map = {product.item_id: product for product in products_queryset}
  571. # # Prefetch ALL original attribute values in ONE query
  572. # original_values_qs = ProductAttributeValue.objects.filter(
  573. # product__item_id__in=item_ids
  574. # ).select_related('product')
  575. # original_values_map = {}
  576. # for attr_val in original_values_qs:
  577. # item_id = attr_val.product.item_id
  578. # if item_id not in original_values_map:
  579. # original_values_map[item_id] = {}
  580. # original_values_map[item_id][attr_val.attribute_name] = attr_val.original_value
  581. # logger.info(f"✓ Loaded {len(product_map)} products from database")
  582. # logger.info(f"✓ Loaded user values for {len(original_values_map)} products")
  583. # # Extract settings
  584. # model = validated_data.get("model")
  585. # extract_additional = validated_data.get("extract_additional", True)
  586. # process_image = validated_data.get("process_image", True)
  587. # multiple = validated_data.get("multiple", [])
  588. # threshold_abs = validated_data.get("threshold_abs", 0.65)
  589. # margin = validated_data.get("margin", 0.15)
  590. # use_dynamic_thresholds = validated_data.get("use_dynamic_thresholds", False)
  591. # use_adaptive_margin = validated_data.get("use_adaptive_margin", False)
  592. # use_semantic_clustering = validated_data.get("use_semantic_clustering", False)
  593. # results = []
  594. # successful = 0
  595. # failed = 0
  596. # # ==================== OPTIMIZATION 2: Conditional Service Init ====================
  597. # ocr_service = None
  598. # visual_service = None
  599. # if process_image:
  600. # try:
  601. # from .ocr_service import OCRService
  602. # from .visual_processing_service import VisualProcessingService
  603. # ocr_service = OCRService()
  604. # visual_service = VisualProcessingService()
  605. # logger.info("✓ Image processing services initialized")
  606. # except ImportError as e:
  607. # logger.warning(f"⚠️ Image processing services not available: {e}")
  608. # process_image = False
  609. # # ==================== OPTIMIZATION 3: Smart Parallelization with 4 Keys ====================
  610. # def process_single_product(product_entry):
  611. # """Process a single product (runs in parallel)"""
  612. # product_start = time.time()
  613. # item_id = product_entry['item_id']
  614. # mandatory_attrs = product_entry['mandatory_attrs']
  615. # if item_id not in product_map:
  616. # return {
  617. # "product_id": item_id,
  618. # "error": "Product not found in database"
  619. # }, False
  620. # product = product_map[item_id]
  621. # try:
  622. # title = product.product_name
  623. # short_desc = product.product_short_description
  624. # long_desc = product.product_long_description
  625. # image_url = product.image_path
  626. # ocr_results = None
  627. # ocr_text = None
  628. # visual_results = None
  629. # # ⚡ IMAGE PROCESSING (if enabled)
  630. # if process_image and image_url:
  631. # if ocr_service:
  632. # try:
  633. # ocr_results = ocr_service.process_image(image_url)
  634. # if ocr_results and ocr_results.get("detected_text"):
  635. # ocr_attrs = ProductAttributeService.extract_attributes_from_ocr(
  636. # ocr_results, model
  637. # )
  638. # ocr_results["extracted_attributes"] = ocr_attrs
  639. # ocr_text = "\n".join([
  640. # f"{item['text']} (confidence: {item['confidence']:.2f})"
  641. # for item in ocr_results["detected_text"]
  642. # ])
  643. # except Exception as e:
  644. # logger.warning(f"OCR failed for {item_id}: {e}")
  645. # if visual_service:
  646. # try:
  647. # product_type_hint = product.product_type if hasattr(product, 'product_type') else None
  648. # visual_results = visual_service.process_image(image_url, product_type_hint)
  649. # if visual_results and visual_results.get('visual_attributes'):
  650. # visual_results['visual_attributes'] = ProductAttributeService.format_visual_attributes(
  651. # visual_results['visual_attributes']
  652. # )
  653. # except Exception as e:
  654. # logger.warning(f"Visual processing failed for {item_id}: {e}")
  655. # # Combine product text with source tracking
  656. # product_text, source_map = ProductAttributeService.combine_product_text(
  657. # title=title,
  658. # short_desc=short_desc,
  659. # long_desc=long_desc,
  660. # ocr_text=ocr_text
  661. # )
  662. # # 🆕 GET USER-ENTERED VALUES FOR THIS PRODUCT
  663. # user_entered_values = original_values_map.get(item_id, {})
  664. # if user_entered_values:
  665. # logger.debug(f"Processing {item_id} with {len(user_entered_values)} user-entered values")
  666. # # ⚡ EXTRACT ATTRIBUTES WITH MULTI-KEY LOAD BALANCER
  667. # extracted = ProductAttributeService.extract_attributes(
  668. # product_text=product_text,
  669. # mandatory_attrs=mandatory_attrs,
  670. # source_map=source_map,
  671. # model=model,
  672. # extract_additional=extract_additional,
  673. # multiple=multiple,
  674. # use_cache=True,
  675. # user_entered_values=user_entered_values
  676. # )
  677. # result = {
  678. # "product_id": product.item_id,
  679. # "mandatory": extracted.get("mandatory", {}),
  680. # "additional": extracted.get("additional", {}),
  681. # }
  682. # if ocr_results:
  683. # result["ocr_results"] = ocr_results
  684. # if visual_results:
  685. # result["visual_results"] = visual_results
  686. # processing_time = time.time() - product_start
  687. # logger.info(f"✓ Processed {item_id} in {processing_time:.2f}s")
  688. # return result, True
  689. # except Exception as e:
  690. # logger.error(f"❌ Error processing {item_id}: {str(e)}", exc_info=True)
  691. # return {
  692. # "product_id": item_id,
  693. # "error": str(e)
  694. # }, False
  695. # # ==================== OPTIMIZATION 4: Parallel Execution with 4 Keys ====================
  696. # # With 4 API keys, we can safely run more workers
  697. # # Image processing: 4-6 workers (I/O bound)
  698. # # No image processing: 8-12 workers (CPU bound)
  699. # if process_image:
  700. # max_workers = min(3, len(product_list))
  701. # else:
  702. # max_workers = min(5, len(product_list))
  703. # logger.info(f"⚡ Using {max_workers} parallel workers with 4-key load balancer")
  704. # with concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) as executor:
  705. # future_to_product = {
  706. # executor.submit(process_single_product, product): product
  707. # for product in product_list
  708. # }
  709. # for future in concurrent.futures.as_completed(future_to_product):
  710. # try:
  711. # result, success = future.result()
  712. # results.append(result)
  713. # if success:
  714. # successful += 1
  715. # else:
  716. # failed += 1
  717. # except Exception as e:
  718. # failed += 1
  719. # logger.error(f"❌ Future execution error: {str(e)}")
  720. # results.append({
  721. # "product_id": "unknown",
  722. # "error": str(e)
  723. # })
  724. # total_time = time.time() - start_time
  725. # # Get comprehensive statistics
  726. # cache_stats = ProductAttributeService.get_cache_stats()
  727. # logger.info(f"""
  728. # ╔═══════════════════════════════════════════════════════════╗
  729. # ║ 🎉 BATCH PROCESSING COMPLETE ║
  730. # ╠═══════════════════════════════════════════════════════════╣
  731. # ║ Total products: {len(product_list):<3} ║
  732. # ║ Successful: {successful:<3} ║
  733. # ║ Failed: {failed:<3} ║
  734. # ║ Total time: {total_time:.2f}s ║
  735. # ║ Avg time/product: {total_time/len(product_list):.2f}s ║
  736. # ║ Workers used: {max_workers:<2} ║
  737. # ║ API keys available: {cache_stats['load_balancer'].get('available_keys', 0)}/{cache_stats['load_balancer'].get('total_keys', 0)} ║
  738. # ╚═══════════════════════════════════════════════════════════╝
  739. # """)
  740. # batch_result = {
  741. # "results": results,
  742. # "total_products": len(product_list),
  743. # "successful": successful,
  744. # "failed": failed,
  745. # "performance": {
  746. # "total_time_seconds": round(total_time, 2),
  747. # "avg_time_per_product": round(total_time / len(product_list), 2),
  748. # "workers_used": max_workers,
  749. # "throughput_products_per_second": round(len(product_list) / total_time, 2)
  750. # },
  751. # "cache_stats": cache_stats
  752. # }
  753. # response_serializer = BatchProductResponseSerializer(data=batch_result)
  754. # if response_serializer.is_valid():
  755. # return Response(response_serializer.data, status=status.HTTP_200_OK)
  756. # return Response(batch_result, status=status.HTTP_200_OK)
  757. class ProductListView(APIView):
  758. """
  759. GET API to list all products with details
  760. """
  761. def get(self, request):
  762. products = Product.objects.all()
  763. serializer = ProductSerializer(products, many=True)
  764. return Response(serializer.data, status=status.HTTP_200_OK)
  765. # -------------------------------------------------------------------------------------------------
def generate_product_excel_background():
    """
    Function to perform batch attribute extraction for all products and generate an Excel file.

    Runs in a background thread to avoid blocking the API response.
    Logs success/failure and saves a status file for external monitoring.

    Pipeline:
      1. Prefetch all ProductTypes with their mandatory attributes and possible values.
      2. Run attribute extraction for every product (sequentially, images disabled).
      3. Write a two-sheet workbook ("Products", "Attribute_values") to EXCEL_FILE_PATH.
      4. Write a JSON status file (SUCCESS/FAILED) to STATUS_FILE_PATH.

    NOTE(review): relies on module-level constants (OUTPUT_URL, EXCEL_FILE_NAME,
    LOG_FILE_NAME, STATUS_FILE_NAME/PATH, EXCEL_FILE_PATH) defined elsewhere in
    this file — confirm they exist before enabling the background thread trigger.
    """
    logger.info(f"[{datetime.now().isoformat()}] Starting background product Excel generation and attribute extraction.")
    successful = 0
    failed = 0
    results = []  # To store detailed extraction results for Excel sheet 2

    # Function to write status file (SUCCESS/FAILED).
    # Reads `successful`/`failed` from the enclosing scope at call time, so it
    # reflects whatever progress was made before the call.
    def write_status(status_type, error_msg=None):
        status_data = {
            "status": status_type,
            "timestamp": datetime.now().isoformat(),
            "products_processed": successful + failed,
            "products_successful": successful,
            "products_failed": failed,
            # FIX: Use the updated EXCEL_FILE_NAME and OUTPUT_URL
            "excel_path": os.path.join(OUTPUT_URL, EXCEL_FILE_NAME) if status_type == "SUCCESS" else None,
            "log_path": os.path.join(OUTPUT_URL, LOG_FILE_NAME),
            "error_message": error_msg
        }
        try:
            # FIX: STATUS_FILE_PATH is now inside generated_outputs
            with open(STATUS_FILE_PATH, 'w') as f:
                json.dump(status_data, f, indent=4)
        except Exception as e:
            logger.exception(f"CRITICAL ERROR: Failed to write status file at {STATUS_FILE_PATH}: {e}")

    try:
        # 1. PREFETCH all necessary related data to minimize database queries
        # Prefetch possible values for mandatory attributes
        possible_values_prefetch = Prefetch(
            'attributes',
            queryset=ProductAttribute.objects.filter(is_mandatory=True).prefetch_related('possible_values')
        )
        # Fetch all ProductTypes with their mandatory attributes and possible values
        all_product_types = ProductType.objects.prefetch_related(possible_values_prefetch)
        product_type_map = {
            pt.name: pt for pt in all_product_types
        }

        # Prepare product_list for batch extraction: one entry per product with
        # its mandatory-attribute name -> possible-values mapping.
        all_products = Product.objects.all()
        product_list = []
        for p in all_products:
            mandatory_attrs_dict = {}
            product_type_name = p.product_type.strip() if p.product_type else None
            if product_type_name and product_type_name in product_type_map:
                pt = product_type_map[product_type_name]
                # Build the mandatory_attrs dictionary: { "Attribute Name": ["Value 1", "Value 2"], ... }
                for attr in pt.attributes.all():
                    mandatory_attrs_dict[attr.name] = [
                        pv.value for pv in attr.possible_values.all()
                    ]
            product_list.append({
                "item_id": p.item_id,
                "product_type_name": product_type_name,
                "mandatory_attrs": mandatory_attrs_dict
            })

        # Batch settings (using defaults). Image processing is disabled here,
        # so the OCR/visual branch below is effectively dead unless this flag
        # is flipped.
        model = "llama-3.1-8b-instant"
        extract_additional = True
        process_image = False
        multiple = []
        threshold_abs = 0.65
        margin = 0.15
        use_dynamic_thresholds = True
        use_adaptive_margin = True
        use_semantic_clustering = True

        # Batch extraction logic: re-fetch products by id and process each one
        # sequentially, accumulating per-product result dicts in `results`.
        item_ids = [p['item_id'] for p in product_list]
        products_queryset = Product.objects.filter(item_id__in=item_ids)
        product_map = {product.item_id: product for product in products_queryset}
        found_ids = set(product_map.keys())
        for product_entry in product_list:
            item_id = product_entry['item_id']
            mandatory_attrs = product_entry['mandatory_attrs']
            if item_id not in found_ids:
                failed += 1
                results.append({
                    "product_id": item_id,
                    "error": "Product not found in database"
                })
                logger.warning(f"Product {item_id} not found in database. Skipping extraction.")
                continue
            product = product_map[item_id]
            try:
                title = product.product_name
                short_desc = product.product_short_description
                long_desc = product.product_long_description
                image_url = product.image_path
                ocr_results = None
                ocr_text = None
                visual_results = None
                if process_image and image_url:
                    logger.info(f"Processing image for product {item_id}...")
                    # OCR Processing
                    ocr_service = OCRService()
                    ocr_results = ocr_service.process_image(image_url)
                    if ocr_results and ocr_results.get("detected_text"):
                        ocr_attrs = ProductAttributeService.extract_attributes_from_ocr(
                            ocr_results, model
                        )
                        ocr_results["extracted_attributes"] = ocr_attrs
                        ocr_text = "\n".join([
                            f"{item['text']} (confidence: {item['confidence']:.2f})"
                            for item in ocr_results["detected_text"]
                        ])
                    # Visual Processing
                    visual_service = VisualProcessingService()
                    product_type_hint = product.product_type if product.product_type else None
                    visual_results = visual_service.process_image(image_url, product_type_hint)
                    if visual_results and visual_results.get('visual_attributes'):
                        visual_results['visual_attributes'] = ProductAttributeService.format_visual_attributes(
                            visual_results['visual_attributes']
                        )
                    logger.info(f"Image processing done for product {item_id}.")
                # Combine product text with source tracking
                product_text, source_map = ProductAttributeService.combine_product_text(
                    title=title,
                    short_desc=short_desc,
                    long_desc=long_desc,
                    ocr_text=ocr_text
                )
                # Attribute Extraction with source tracking
                extracted = ProductAttributeService.extract_attributes(
                    product_text=product_text,
                    mandatory_attrs=mandatory_attrs,
                    source_map=source_map,
                    model=model,
                    extract_additional=extract_additional,
                    multiple=multiple,
                    threshold_abs=threshold_abs,
                    margin=margin,
                    use_dynamic_thresholds=use_dynamic_thresholds,
                    use_adaptive_margin=use_adaptive_margin,
                    use_semantic_clustering=use_semantic_clustering
                )
                result = {
                    "product_id": item_id,
                    "mandatory": extracted.get("mandatory", {}),
                    "additional": extracted.get("additional", {}),
                }
                if ocr_results:
                    result["ocr_results"] = ocr_results
                if visual_results:
                    result["visual_results"] = visual_results
                results.append(result)
                successful += 1
                logger.info(f"Attribute extraction successful for product {item_id}.")
            except Exception as e:
                # Per-product failure is recorded and the batch continues.
                failed += 1
                results.append({
                    "product_id": item_id,
                    "error": str(e)
                })
                logger.exception(f"Error during attribute extraction for product {item_id}.")
        logger.info(f"Batch extraction phase complete. Successful: {successful}, Failed: {failed}")

        # --------------------------------------------------------------------------------
        # Generate and save the Excel file
        # --------------------------------------------------------------------------------
        wb = Workbook()
        # Sheet 1: Products (from DB) (Logic is the same, skipped for brevity)
        ws_products = wb.active
        ws_products.title = "Products"
        products_headers = ['ITEM ID', 'PRODUCT NAME', 'PRODUCT TYPE', 'Product Short Description', 'Product Long Description', 'image_path']
        header_fill = PatternFill(start_color="366092", end_color="366092", fill_type="solid")
        header_font = Font(bold=True, color="FFFFFF")
        for col_num, header in enumerate(products_headers, 1):
            cell = ws_products.cell(row=1, column=col_num)
            cell.value = header
            cell.fill = header_fill
            cell.font = header_font
            cell.alignment = Alignment(horizontal="center", vertical="center")
        all_products_db = Product.objects.all()
        # Data rows start at row 2 (row 1 is the header).
        for row_num, product in enumerate(all_products_db, 2):
            ws_products.cell(row=row_num, column=1, value=product.item_id)
            ws_products.cell(row=row_num, column=2, value=product.product_name)
            ws_products.cell(row=row_num, column=3, value=product.product_type)
            ws_products.cell(row=row_num, column=4, value=product.product_short_description)
            ws_products.cell(row=row_num, column=5, value=product.product_long_description)
            ws_products.cell(row=row_num, column=6, value=product.image_path)
        for col_dim, width in zip(['A', 'B', 'C', 'D', 'E', 'F'], [15, 25, 15, 35, 50, 45]):
            ws_products.column_dimensions[col_dim].width = width

        # Sheet 2: Attribute_values (Logic is the same, skipped for brevity)
        ws_attributes = wb.create_sheet("Attribute_values")
        attributes_headers = ['item_id', 'attribute_name', 'original_value', 'generated_value']
        for col_num, header in enumerate(attributes_headers, 1):
            cell = ws_attributes.cell(row=1, column=col_num)
            cell.value = header
            cell.fill = header_fill
            cell.font = header_font
            cell.alignment = Alignment(horizontal="center", vertical="center")
        row_num = 2
        all_original_attrs = ProductAttributeValue.objects.all()
        # Lookup of user-entered values keyed by (item_id, attribute_name).
        original_attrs_lookup = {
            (attr.product.item_id, attr.attribute_name): attr.original_value
            for attr in all_original_attrs
        }
        # Tracks which (item_id, attribute_name) pairs have already been
        # written, so leftover originals can be appended exactly once.
        processed_original_keys = set()
        for res in results:
            # ... (Excel writing logic for generated/original attributes remains unchanged)
            item_id = res["product_id"]
            if "error" in res:
                # Extraction failed: still emit the product's original values,
                # marking the generated column with the failure reason.
                for (orig_item_id, orig_attr_name), orig_value in original_attrs_lookup.items():
                    if orig_item_id == item_id:
                        ws_attributes.cell(row=row_num, column=1, value=orig_item_id)
                        ws_attributes.cell(row=row_num, column=2, value=orig_attr_name)
                        ws_attributes.cell(row=row_num, column=3, value=orig_value)
                        ws_attributes.cell(row=row_num, column=4, value=f"Extraction Failed: {res['error']}")
                        processed_original_keys.add((orig_item_id, orig_attr_name))
                        row_num += 1
                continue
            # Collect every generated value for this product, tagged with its
            # source, keyed by (item_id, attribute_name).
            generated_attrs = {}
            for cat in ["mandatory", "additional"]:
                attrs = res.get(cat, {})
                for attr_name, values in attrs.items():
                    for val in values:
                        key = (item_id, attr_name)
                        if key not in generated_attrs:
                            generated_attrs[key] = []
                        generated_attrs[key].append(f"{val['value']} (source: {val['source']})")
            ocr = res.get("ocr_results")
            if ocr and "extracted_attributes" in ocr and isinstance(ocr["extracted_attributes"], dict):
                for attr_name, values in ocr["extracted_attributes"].items():
                    for val in values:
                        key = (item_id, attr_name)
                        if key not in generated_attrs:
                            generated_attrs[key] = []
                        generated_attrs[key].append(f"{val['value']} (source: {val['source']})")
            visual = res.get("visual_results")
            if visual and "visual_attributes" in visual:
                vis_attrs = visual["visual_attributes"]
                # Visual attributes may arrive as a dict or a list of dicts;
                # both shapes are normalized into the same key -> values map.
                if isinstance(vis_attrs, dict):
                    for attr_name, values in vis_attrs.items():
                        if not isinstance(values, list):
                            values = [{"value": values, "source": "visual"}]
                        for val in values:
                            key = (item_id, attr_name)
                            if key not in generated_attrs:
                                generated_attrs[key] = []
                            generated_attrs[key].append(f"{val['value']} (source: {val.get('source', 'visual')})")
                elif isinstance(vis_attrs, list):
                    for item in vis_attrs:
                        attr_name = item.get("attribute_name") or item.get("name")
                        if not attr_name: continue
                        value = item.get("value", "")
                        source = item.get("source", "visual")
                        key = (item_id, attr_name)
                        if key not in generated_attrs:
                            generated_attrs[key] = []
                        generated_attrs[key].append(f"{value} (source: {source})")
            # One row per generated attribute, pairing it with the original
            # user-entered value when one exists.
            for (attr_item_id, attr_name), gen_values in generated_attrs.items():
                original_value = original_attrs_lookup.get((attr_item_id, attr_name), "")
                generated_value = "; ".join(gen_values) if gen_values else ""
                ws_attributes.cell(row=row_num, column=1, value=attr_item_id)
                ws_attributes.cell(row=row_num, column=2, value=attr_name)
                ws_attributes.cell(row=row_num, column=3, value=original_value)
                ws_attributes.cell(row=row_num, column=4, value=generated_value)
                processed_original_keys.add((attr_item_id, attr_name))
                row_num += 1
            # Originals for this product that had no generated counterpart.
            for (orig_item_id, orig_attr_name), orig_value in original_attrs_lookup.items():
                if orig_item_id == item_id and (orig_item_id, orig_attr_name) not in processed_original_keys:
                    ws_attributes.cell(row=row_num, column=1, value=orig_item_id)
                    ws_attributes.cell(row=row_num, column=2, value=orig_attr_name)
                    ws_attributes.cell(row=row_num, column=3, value=orig_value)
                    ws_attributes.cell(row=row_num, column=4, value="")
                    processed_original_keys.add((orig_item_id, orig_attr_name))
                    row_num += 1
        # Finally, originals belonging to products never seen in `results`.
        for (orig_item_id, orig_attr_name), orig_value in original_attrs_lookup.items():
            if (orig_item_id, orig_attr_name) not in processed_original_keys:
                ws_attributes.cell(row=row_num, column=1, value=orig_item_id)
                ws_attributes.cell(row=row_num, column=2, value=orig_attr_name)
                ws_attributes.cell(row=row_num, column=3, value=orig_value)
                ws_attributes.cell(row=row_num, column=4, value="Original value only (Product not processed in batch)")
                row_num += 1
        for col_dim, width in zip(['A', 'B', 'C', 'D'], [15, 35, 50, 50]):
            ws_attributes.column_dimensions[col_dim].width = width

        # FIX: Save to the new EXCEL_FILE_PATH
        wb.save(EXCEL_FILE_PATH)
        logger.info(f"Excel file successfully saved to {EXCEL_FILE_PATH}")
        # Write SUCCESS status
        write_status("SUCCESS")
        logger.info("Background task finished successfully.")
    except Exception as e:
        # Log the critical error and write FAILED status
        logger.exception("CRITICAL ERROR during background Excel generation process.")
        write_status("FAILED", error_msg=str(e))
  1054. # -------------------------------------------------------------------------------------------------
class ProductUploadExcelView(APIView):
    """
    POST API to upload an Excel file.

    Expects a multipart upload with a 'file' field containing a workbook with a
    mandatory 'Products' sheet and an optional 'Attribute_values' sheet. Rows are
    upserted into Product / ProductAttributeValue inside a single transaction.
    """
    parser_classes = (MultiPartParser, FormParser)

    def post(self, request, *args, **kwargs):
        """
        Parse the uploaded workbook, upsert products and attribute values,
        and return per-sheet created/updated/failed counts (HTTP 201 on a
        clean run, 207 when some rows failed, 400/500 on file-level errors).
        """
        file_obj = request.FILES.get('file')
        if not file_obj:
            return Response({'error': 'No file provided'}, status=status.HTTP_400_BAD_REQUEST)
        try:
            # ... (Upload and DB processing logic remains unchanged)
            # Read all sheets from Excel file
            excel_file = pd.ExcelFile(file_obj)
            # Check if required sheets exist
            if 'Products' not in excel_file.sheet_names:
                logger.error(f"Upload failed: Missing 'Products' sheet in file.")
                return Response({
                    'error': "Missing 'Products' sheet",
                    'available_sheets': excel_file.sheet_names
                }, status=status.HTTP_400_BAD_REQUEST)
            df_products = pd.read_excel(excel_file, sheet_name='Products')
            # Normalize headers: trim, lowercase, spaces -> underscores.
            df_products.columns = [c.strip().lower().replace(' ', '_') for c in df_products.columns]
            expected_product_cols = {
                'item_id', 'product_name', 'product_long_description',
                'product_short_description', 'product_type', 'image_path'
            }
            if not expected_product_cols.issubset(df_products.columns):
                logger.error(f"Upload failed: Missing required columns in Products sheet.")
                return Response({
                    'error': 'Missing required columns in Products sheet',
                    'required_columns': list(expected_product_cols),
                    'found_columns': list(df_products.columns)
                }, status=status.HTTP_400_BAD_REQUEST)
            df_attributes = None
            has_attributes_sheet = 'Attribute_values' in excel_file.sheet_names
            if has_attributes_sheet:
                df_attributes = pd.read_excel(excel_file, sheet_name='Attribute_values')
                df_attributes.columns = [c.strip().lower().replace(' ', '_') for c in df_attributes.columns]
                expected_attr_cols = {'item_id', 'attribute_name', 'original_value'}
                if not expected_attr_cols.issubset(df_attributes.columns):
                    logger.error(f"Upload failed: Missing required columns in Attribute_values sheet.")
                    return Response({
                        'error': 'Missing required columns in Attribute_values sheet',
                        'required_columns': list(expected_attr_cols),
                        'found_columns': list(df_attributes.columns)
                    }, status=status.HTTP_400_BAD_REQUEST)
            # Per-sheet counters; `errors` accumulates human-readable row errors.
            products_created = 0
            products_updated = 0
            attributes_created = 0
            attributes_updated = 0
            products_failed = 0
            attributes_failed = 0
            errors = []
            # All DB writes happen in one transaction; row-level exceptions are
            # caught so a single bad row does not roll back the whole upload.
            with transaction.atomic():
                for idx, row in df_products.iterrows():
                    item_id = str(row.get('item_id', '')).strip()
                    product_type = str(row.get('product_type', '')).strip()
                    if not item_id:
                        products_failed += 1
                        # idx is 0-based and row 1 is the header, hence idx + 2.
                        errors.append(f"Products Row {idx + 2}: Missing item_id")
                        continue
                    try:
                        if product_type:
                            ProductType.objects.get_or_create(name=product_type)
                        defaults = {
                            'product_name': str(row.get('product_name', '')),
                            'product_long_description': str(row.get('product_long_description', '')),
                            'product_short_description': str(row.get('product_short_description', '')),
                            'product_type': product_type,
                            'image_path': str(row.get('image_path', '')),
                        }
                        obj, created = Product.objects.update_or_create(item_id=item_id, defaults=defaults)
                        if created: products_created += 1
                        else: products_updated += 1
                    except Exception as e:
                        products_failed += 1
                        errors.append(f"Products Row {idx + 2} (item_id: {item_id}): {str(e)}")
                        logger.error(f"Error processing product {item_id} in Products sheet: {e}")
                if has_attributes_sheet and df_attributes is not None:
                    # Bulk-fetch referenced products once to avoid a query per row.
                    item_ids_in_attrs = df_attributes['item_id'].astype(str).unique()
                    existing_products = {p.item_id: p for p in Product.objects.filter(item_id__in=item_ids_in_attrs)}
                    for idx, row in df_attributes.iterrows():
                        item_id = str(row.get('item_id', '')).strip()
                        attribute_name = str(row.get('attribute_name', '')).strip()
                        original_value = str(row.get('original_value', '')).strip()
                        if not item_id or not attribute_name:
                            attributes_failed += 1
                            errors.append(f"Attribute_values Row {idx + 2}: Missing item_id or attribute_name")
                            continue
                        product = existing_products.get(item_id)
                        if not product:
                            attributes_failed += 1
                            errors.append(f"Attribute_values Row {idx + 2}: Product with item_id '{item_id}' not found. Make sure it exists in Products sheet.")
                            continue
                        try:
                            attr_obj, created = ProductAttributeValue.objects.update_or_create(
                                product=product,
                                attribute_name=attribute_name,
                                defaults={'original_value': original_value}
                            )
                            if created: attributes_created += 1
                            else: attributes_updated += 1
                        except Exception as e:
                            attributes_failed += 1
                            errors.append(f"Attribute_values Row {idx + 2} (item_id: {item_id}, attribute: {attribute_name}): {str(e)}")
                            logger.error(f"Error processing attribute {attribute_name} for product {item_id}: {e}")
            # Prepare response data
            response_data = {
                'message': 'Upload completed',
                'products': {
                    'created': products_created, 'updated': products_updated, 'failed': products_failed,
                    'total_processed': products_created + products_updated + products_failed
                },
                'attribute_values': {
                    'created': attributes_created, 'updated': attributes_updated, 'failed': attributes_failed,
                    'total_processed': attributes_created + attributes_updated + attributes_failed
                } if has_attributes_sheet else {'message': 'Attribute_values sheet not found in Excel file'},
                'generated_excel_status': 'Excel generation started in the background.'
            }
            if errors:
                # Cap the error list at 50 entries to keep the response small.
                response_data['errors'] = errors[:50]
                if len(errors) > 50:
                    response_data['errors'].append(f"... and {len(errors) - 50} more errors")
            upload_status = status.HTTP_201_CREATED if products_failed == 0 and attributes_failed == 0 else status.HTTP_207_MULTI_STATUS
            # Start background thread for Excel generation if upload was successful
            if products_failed == 0 and attributes_failed == 0:
                # NOTE(review): the thread launch below is deliberately disabled;
                # the default 'generated_excel_status' message above is therefore
                # misleading until it is re-enabled.
                logger.info("API call successful. Triggering background Excel generation thread is commented for now !!!!.")
                # threading.Thread(target=generate_product_excel_background, daemon=True).start()
                ## FIX: Update monitoring URLs to point to the new generated_outputs subfolder
                # response_data['generated_excel_status'] = 'Background Excel generation triggered successfully.'
                # response_data['monitoring'] = {
                #     'excel_file': os.path.join(OUTPUT_URL, EXCEL_FILE_NAME),
                #     'status_file': os.path.join(OUTPUT_URL, STATUS_FILE_NAME),
                #     'log_file': os.path.join(OUTPUT_URL, LOG_FILE_NAME),
                #     'note': 'These files will be available once the background process completes.'
                # }
            else:
                logger.warning(f"API call finished with errors ({products_failed} products, {attributes_failed} attributes). Not triggering background excel generation.")
                response_data['generated_excel_status'] = 'Background Excel generation was NOT triggered due to upload errors. Fix upload errors and re-upload.'
            return Response(response_data, status=upload_status)
        except pd.errors.EmptyDataError:
            logger.error('The uploaded Excel file is empty or invalid.')
            return Response({'error': 'The uploaded Excel file is empty or invalid'}, status=status.HTTP_400_BAD_REQUEST)
        except Exception as e:
            logger.exception(f'An unexpected error occurred while processing the file.')
            return Response({'error': f'An unexpected error occurred while processing the file: {str(e)}'}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
  1201. class DownloadExcelTemplateView(APIView):
  1202. """
  1203. GET API to download an Excel template with two sheets:
  1204. 1. Products sheet with sample data
  1205. 2. Attribute_values sheet with sample data
  1206. """
  1207. def get(self, request):
  1208. # Create a new workbook
  1209. wb = Workbook()
  1210. # Remove default sheet
  1211. if 'Sheet' in wb.sheetnames:
  1212. wb.remove(wb['Sheet'])
  1213. # ===== Create Products Sheet =====
  1214. ws_products = wb.create_sheet("Products", 0)
  1215. # Define headers for Products
  1216. products_headers = [
  1217. 'ITEM ID',
  1218. 'PRODUCT NAME',
  1219. 'PRODUCT TYPE',
  1220. 'Product Short Description',
  1221. 'Product Long Description',
  1222. 'image_path'
  1223. ]
  1224. # Style for headers
  1225. header_fill = PatternFill(start_color="366092", end_color="366092", fill_type="solid")
  1226. header_font = Font(bold=True, color="FFFFFF")
  1227. # Add headers to Products sheet
  1228. for col_num, header in enumerate(products_headers, 1):
  1229. cell = ws_products.cell(row=1, column=col_num)
  1230. cell.value = header
  1231. cell.fill = header_fill
  1232. cell.font = header_font
  1233. cell.alignment = Alignment(horizontal="center", vertical="center")
  1234. # Add sample data to Products sheet
  1235. sample_products = [
  1236. [
  1237. '3217373735',
  1238. 'Blue V-Neck T-Shirt',
  1239. 'Clothing',
  1240. 'Stylish blue t-shirt with v-neck design',
  1241. 'Premium quality cotton t-shirt featuring a classic v-neck design. Perfect for casual wear. Available in vibrant blue color.',
  1242. 'https://images.unsplash.com/photo-1521572163474-6864f9cf17ab'
  1243. ],
  1244. [
  1245. '1234567890',
  1246. 'Red Cotton Dress',
  1247. 'Clothing',
  1248. 'Beautiful red dress for special occasions',
  1249. 'Elegant red dress made from 100% cotton fabric. Features a flowing design perfect for summer events and parties.',
  1250. 'https://images.unsplash.com/photo-1595777457583-95e059d581b8'
  1251. ],
  1252. [
  1253. '9876543210',
  1254. 'Steel Screws Pack',
  1255. 'Hardware',
  1256. 'Pack of zinc plated steel screws',
  1257. 'Professional grade steel screws with zinc plating for corrosion resistance. Pack contains 50 pieces, 2 inch length, M6 thread size.',
  1258. 'https://images.unsplash.com/photo-1542272604-787c3835535d'
  1259. ]
  1260. ]
  1261. for row_num, row_data in enumerate(sample_products, 2):
  1262. for col_num, value in enumerate(row_data, 1):
  1263. ws_products.cell(row=row_num, column=col_num, value=value)
  1264. # Adjust column widths for Products sheet
  1265. ws_products.column_dimensions['A'].width = 15 # ITEM ID
  1266. ws_products.column_dimensions['B'].width = 25 # PRODUCT NAME
  1267. ws_products.column_dimensions['C'].width = 15 # PRODUCT TYPE
  1268. ws_products.column_dimensions['D'].width = 35 # Short Description
  1269. ws_products.column_dimensions['E'].width = 50 # Long Description
  1270. ws_products.column_dimensions['F'].width = 45 # image_path
  1271. # ===== Create Attribute_values Sheet =====
  1272. ws_attributes = wb.create_sheet("Attribute_values", 1)
  1273. # Define headers for Attribute_values
  1274. attributes_headers = ['item_id', 'attribute_name', 'original_value']
  1275. # Add headers to Attribute_values sheet
  1276. for col_num, header in enumerate(attributes_headers, 1):
  1277. cell = ws_attributes.cell(row=1, column=col_num)
  1278. cell.value = header
  1279. cell.fill = header_fill
  1280. cell.font = header_font
  1281. cell.alignment = Alignment(horizontal="center", vertical="center")
  1282. # Add sample data to Attribute_values sheet
  1283. sample_attributes = [
  1284. ['3217373735', 'Clothing Neck Style', 'V-Neck Square'],
  1285. ['3217373735', 'Condition', 'New with tags'],
  1286. ['3217373735', 'Material', '100% Cotton'],
  1287. ['3217373735', 'Color', 'Sky Blue'],
  1288. ['3217373735', 'Size', 'Medium'],
  1289. ['1234567890', 'Sleeve Length', 'Sleeveless'],
  1290. ['1234567890', 'Condition', 'Brand New'],
  1291. ['1234567890', 'Pattern', 'Solid'],
  1292. ['1234567890', 'Material', 'Cotton Blend'],
  1293. ['1234567890', 'Color', 'Crimson Red'],
  1294. ['9876543210', 'Material', 'Stainless Steel'],
  1295. ['9876543210', 'Thread Size', 'M6'],
  1296. ['9876543210', 'Length', '2 inches'],
  1297. ['9876543210', 'Coating', 'Zinc Plated'],
  1298. ['9876543210', 'Package Quantity', '50 pieces'],
  1299. ]
  1300. for row_num, row_data in enumerate(sample_attributes, 2):
  1301. for col_num, value in enumerate(row_data, 1):
  1302. ws_attributes.cell(row=row_num, column=col_num, value=value)
  1303. # Adjust column widths for Attribute_values sheet
  1304. ws_attributes.column_dimensions['A'].width = 15 # item_id
  1305. ws_attributes.column_dimensions['B'].width = 25 # attribute_name
  1306. ws_attributes.column_dimensions['C'].width = 30 # original_value
  1307. # Add instructions sheet
  1308. ws_instructions = wb.create_sheet("Instructions", 2)
  1309. instructions_text = [
  1310. ['Excel Upload Instructions', ''],
  1311. ['', ''],
  1312. ['Sheet 1: Products', ''],
  1313. ['- Contains product basic information', ''],
  1314. ['- All columns are required', ''],
  1315. ['- ITEM ID must be unique', ''],
  1316. ['', ''],
  1317. ['Sheet 2: Attribute_values', ''],
  1318. ['- Contains original/manual attribute values', ''],
  1319. ['- item_id must match an ITEM ID from Products sheet', ''],
  1320. ['- Multiple rows can have the same item_id (for different attributes)', ''],
  1321. ['- Each attribute per product should be on a separate row', ''],
  1322. ['', ''],
  1323. ['Upload Process:', ''],
  1324. ['1. Fill in your product data in the Products sheet', ''],
  1325. ['2. Fill in attribute values in the Attribute_values sheet', ''],
  1326. ['3. Ensure item_id values match between both sheets', ''],
  1327. ['4. Save the file and upload via API', ''],
  1328. ['', ''],
  1329. ['Notes:', ''],
  1330. ['- Do not change sheet names (must be "Products" and "Attribute_values")', ''],
  1331. ['- Do not change column header names', ''],
  1332. ['- You can delete the sample data rows', ''],
  1333. ['- You can delete this Instructions sheet before uploading', ''],
  1334. ]
  1335. for row_num, row_data in enumerate(instructions_text, 1):
  1336. ws_instructions.cell(row=row_num, column=1, value=row_data[0])
  1337. if row_num == 1:
  1338. cell = ws_instructions.cell(row=row_num, column=1)
  1339. cell.font = Font(bold=True, size=14)
  1340. ws_instructions.column_dimensions['A'].width = 60
  1341. # Save to BytesIO
  1342. output = io.BytesIO()
  1343. wb.save(output)
  1344. output.seek(0)
  1345. # Create response
  1346. response = HttpResponse(
  1347. output.getvalue(),
  1348. content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
  1349. )
  1350. response['Content-Disposition'] = 'attachment; filename=product_upload_template.xlsx'
  1351. return response
  1352. class DownloadProductsWithAttributesExcelView(APIView):
  1353. """
  1354. GET API to download existing products with their attribute values as Excel.
  1355. Useful for users to update existing data.
  1356. """
  1357. def get(self, request):
  1358. from .models import Product, ProductAttributeValue
  1359. # Create workbook
  1360. wb = Workbook()
  1361. if 'Sheet' in wb.sheetnames:
  1362. wb.remove(wb['Sheet'])
  1363. # ===== Products Sheet =====
  1364. ws_products = wb.create_sheet("Products", 0)
  1365. # Headers
  1366. products_headers = [
  1367. 'ITEM ID', 'PRODUCT NAME', 'PRODUCT TYPE',
  1368. 'Product Short Description', 'Product Long Description', 'image_path'
  1369. ]
  1370. header_fill = PatternFill(start_color="366092", end_color="366092", fill_type="solid")
  1371. header_font = Font(bold=True, color="FFFFFF")
  1372. for col_num, header in enumerate(products_headers, 1):
  1373. cell = ws_products.cell(row=1, column=col_num)
  1374. cell.value = header
  1375. cell.fill = header_fill
  1376. cell.font = header_font
  1377. cell.alignment = Alignment(horizontal="center", vertical="center")
  1378. # Fetch and add product data
  1379. products = Product.objects.all()
  1380. for row_num, product in enumerate(products, 2):
  1381. ws_products.cell(row=row_num, column=1, value=product.item_id)
  1382. ws_products.cell(row=row_num, column=2, value=product.product_name)
  1383. ws_products.cell(row=row_num, column=3, value=product.product_type)
  1384. ws_products.cell(row=row_num, column=4, value=product.product_short_description)
  1385. ws_products.cell(row=row_num, column=5, value=product.product_long_description)
  1386. ws_products.cell(row=row_num, column=6, value=product.image_path)
  1387. # Adjust widths
  1388. ws_products.column_dimensions['A'].width = 15
  1389. ws_products.column_dimensions['B'].width = 25
  1390. ws_products.column_dimensions['C'].width = 15
  1391. ws_products.column_dimensions['D'].width = 35
  1392. ws_products.column_dimensions['E'].width = 50
  1393. ws_products.column_dimensions['F'].width = 45
  1394. # ===== Attribute_values Sheet =====
  1395. ws_attributes = wb.create_sheet("Attribute_values", 1)
  1396. attributes_headers = ['item_id', 'attribute_name', 'original_value']
  1397. for col_num, header in enumerate(attributes_headers, 1):
  1398. cell = ws_attributes.cell(row=1, column=col_num)
  1399. cell.value = header
  1400. cell.fill = header_fill
  1401. cell.font = header_font
  1402. cell.alignment = Alignment(horizontal="center", vertical="center")
  1403. # Fetch and add attribute values
  1404. attributes = ProductAttributeValue.objects.select_related('product').all()
  1405. for row_num, attr in enumerate(attributes, 2):
  1406. ws_attributes.cell(row=row_num, column=1, value=attr.product.item_id)
  1407. ws_attributes.cell(row=row_num, column=2, value=attr.attribute_name)
  1408. ws_attributes.cell(row=row_num, column=3, value=attr.original_value)
  1409. ws_attributes.column_dimensions['A'].width = 15
  1410. ws_attributes.column_dimensions['B'].width = 25
  1411. ws_attributes.column_dimensions['C'].width = 30
  1412. # Save to BytesIO
  1413. output = io.BytesIO()
  1414. wb.save(output)
  1415. output.seek(0)
  1416. response = HttpResponse(
  1417. output.getvalue(),
  1418. content_type='application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
  1419. )
  1420. response['Content-Disposition'] = 'attachment; filename=products_export.xlsx'
  1421. return response
  1422. class ProductAttributesUploadView(APIView):
  1423. """
  1424. POST API to upload an Excel file and add mandatory/additional attributes
  1425. for product types with possible values.
  1426. """
  1427. parser_classes = (MultiPartParser, FormParser)
  1428. def post(self, request):
  1429. file_obj = request.FILES.get('file')
  1430. if not file_obj:
  1431. return Response({"error": "No file provided."}, status=status.HTTP_400_BAD_REQUEST)
  1432. try:
  1433. df = pd.read_excel(file_obj)
  1434. required_columns = {'product_type', 'attribute_name', 'is_mandatory', 'possible_values'}
  1435. if not required_columns.issubset(df.columns):
  1436. return Response({
  1437. "error": f"Missing required columns. Found: {list(df.columns)}"
  1438. }, status=status.HTTP_400_BAD_REQUEST)
  1439. for _, row in df.iterrows():
  1440. product_type_name = str(row['product_type']).strip()
  1441. attr_name = str(row['attribute_name']).strip()
  1442. is_mandatory = str(row['is_mandatory']).strip().lower() in ['yes', 'true', '1']
  1443. possible_values = str(row.get('possible_values', '')).strip()
  1444. # Get or create product type
  1445. product_type, _ = ProductType.objects.get_or_create(name=product_type_name)
  1446. # Get or create attribute
  1447. attribute, _ = ProductAttribute.objects.get_or_create(
  1448. product_type=product_type,
  1449. name=attr_name,
  1450. defaults={'is_mandatory': is_mandatory}
  1451. )
  1452. attribute.is_mandatory = is_mandatory
  1453. attribute.save()
  1454. # Handle possible values
  1455. AttributePossibleValue.objects.filter(attribute=attribute).delete()
  1456. if possible_values:
  1457. for val in [v.strip() for v in possible_values.split(',') if v.strip()]:
  1458. AttributePossibleValue.objects.create(attribute=attribute, value=val)
  1459. return Response({"message": "Attributes uploaded successfully."}, status=status.HTTP_201_CREATED)
  1460. except Exception as e:
  1461. return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
  1462. class ProductTypeAttributesView(APIView):
  1463. """
  1464. API to view, create, update, and delete product type attributes and their possible values.
  1465. Also supports dynamic product type creation.
  1466. """
  1467. def get(self, request):
  1468. """
  1469. Retrieve all product types with their attributes and possible values.
  1470. """
  1471. product_types = ProductType.objects.all()
  1472. serializer = ProductTypeSerializer(product_types, many=True)
  1473. # Transform the serialized data into the requested format
  1474. result = []
  1475. for pt in serializer.data:
  1476. for attr in pt['attributes']:
  1477. result.append({
  1478. 'product_type': pt['name'],
  1479. 'attribute_name': attr['name'],
  1480. 'is_mandatory': 'Yes' if attr['is_mandatory'] else 'No',
  1481. 'possible_values': ', '.join([pv['value'] for pv in attr['possible_values']])
  1482. })
  1483. return Response(result, status=status.HTTP_200_OK)
  1484. def post(self, request):
  1485. """
  1486. Create a new product type or attribute with possible values.
  1487. Expected payload example:
  1488. {
  1489. "product_type": "Hardware Screws",
  1490. "attribute_name": "Material",
  1491. "is_mandatory": "Yes",
  1492. "possible_values": "Steel, Zinc Plated, Stainless Steel"
  1493. }
  1494. """
  1495. try:
  1496. product_type_name = request.data.get('product_type')
  1497. attribute_name = request.data.get('attribute_name', '')
  1498. is_mandatory = request.data.get('is_mandatory', '').lower() in ['yes', 'true', '1']
  1499. possible_values = request.data.get('possible_values', '')
  1500. if not product_type_name:
  1501. return Response({
  1502. "error": "product_type is required"
  1503. }, status=status.HTTP_400_BAD_REQUEST)
  1504. with transaction.atomic():
  1505. # Get or create product type
  1506. product_type, created = ProductType.objects.get_or_create(name=product_type_name)
  1507. if created and not attribute_name:
  1508. return Response({
  1509. "message": f"Product type '{product_type_name}' created successfully",
  1510. "data": {"product_type": product_type_name}
  1511. }, status=status.HTTP_201_CREATED)
  1512. if attribute_name:
  1513. # Create attribute
  1514. attribute, attr_created = ProductAttribute.objects.get_or_create(
  1515. product_type=product_type,
  1516. name=attribute_name,
  1517. defaults={'is_mandatory': is_mandatory}
  1518. )
  1519. if not attr_created:
  1520. return Response({
  1521. "error": f"Attribute '{attribute_name}' already exists for product type '{product_type_name}'"
  1522. }, status=status.HTTP_400_BAD_REQUEST)
  1523. # Handle possible values
  1524. if possible_values:
  1525. for val in [v.strip() for v in possible_values.split(',') if v.strip()]:
  1526. AttributePossibleValue.objects.create(attribute=attribute, value=val)
  1527. return Response({
  1528. "message": "Attribute created successfully",
  1529. "data": {
  1530. "product_type": product_type_name,
  1531. "attribute_name": attribute_name,
  1532. "is_mandatory": "Yes" if is_mandatory else "No",
  1533. "possible_values": possible_values
  1534. }
  1535. }, status=status.HTTP_201_CREATED)
  1536. return Response({
  1537. "message": f"Product type '{product_type_name}' already exists",
  1538. "data": {"product_type": product_type_name}
  1539. }, status=status.HTTP_200_OK)
  1540. except Exception as e:
  1541. return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
  1542. def put(self, request):
  1543. """
  1544. Update an existing product type attribute and its possible values.
  1545. Expected payload example:
  1546. {
  1547. "product_type": "Hardware Screws",
  1548. "attribute_name": "Material",
  1549. "is_mandatory": "Yes",
  1550. "possible_values": "Steel, Zinc Plated, Stainless Steel, Brass"
  1551. }
  1552. """
  1553. try:
  1554. product_type_name = request.data.get('product_type')
  1555. attribute_name = request.data.get('attribute_name')
  1556. is_mandatory = request.data.get('is_mandatory', '').lower() in ['yes', 'true', '1']
  1557. possible_values = request.data.get('possible_values', '')
  1558. if not all([product_type_name, attribute_name]):
  1559. return Response({
  1560. "error": "product_type and attribute_name are required"
  1561. }, status=status.HTTP_400_BAD_REQUEST)
  1562. with transaction.atomic():
  1563. try:
  1564. product_type = ProductType.objects.get(name=product_type_name)
  1565. attribute = ProductAttribute.objects.get(
  1566. product_type=product_type,
  1567. name=attribute_name
  1568. )
  1569. except ProductType.DoesNotExist:
  1570. return Response({
  1571. "error": f"Product type '{product_type_name}' not found"
  1572. }, status=status.HTTP_404_NOT_FOUND)
  1573. except ProductAttribute.DoesNotExist:
  1574. return Response({
  1575. "error": f"Attribute '{attribute_name}' not found for product type '{product_type_name}'"
  1576. }, status=status.HTTP_404_NOT_FOUND)
  1577. # Update attribute
  1578. attribute.is_mandatory = is_mandatory
  1579. attribute.save()
  1580. # Update possible values
  1581. AttributePossibleValue.objects.filter(attribute=attribute).delete()
  1582. if possible_values:
  1583. for val in [v.strip() for v in possible_values.split(',') if v.strip()]:
  1584. AttributePossibleValue.objects.create(attribute=attribute, value=val)
  1585. return Response({
  1586. "message": "Attribute updated successfully",
  1587. "data": {
  1588. "product_type": product_type_name,
  1589. "attribute_name": attribute_name,
  1590. "is_mandatory": "Yes" if is_mandatory else "No",
  1591. "possible_values": possible_values
  1592. }
  1593. }, status=status.HTTP_200_OK)
  1594. except Exception as e:
  1595. return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
  1596. def delete(self, request):
  1597. """
  1598. Delete a product type or a specific attribute.
  1599. Expected payload example:
  1600. {
  1601. "product_type": "Hardware Screws",
  1602. "attribute_name": "Material"
  1603. }
  1604. """
  1605. try:
  1606. product_type_name = request.data.get('product_type')
  1607. attribute_name = request.data.get('attribute_name', '')
  1608. if not product_type_name:
  1609. return Response({
  1610. "error": "product_type is required"
  1611. }, status=status.HTTP_400_BAD_REQUEST)
  1612. with transaction.atomic():
  1613. try:
  1614. product_type = ProductType.objects.get(name=product_type_name)
  1615. except ProductType.DoesNotExist:
  1616. return Response({
  1617. "error": f"Product type '{product_type_name}' not found"
  1618. }, status=status.HTTP_404_NOT_FOUND)
  1619. if attribute_name:
  1620. # Delete specific attribute
  1621. try:
  1622. attribute = ProductAttribute.objects.get(
  1623. product_type=product_type,
  1624. name=attribute_name
  1625. )
  1626. attribute.delete()
  1627. return Response({
  1628. "message": f"Attribute '{attribute_name}' deleted successfully from product type '{product_type_name}'"
  1629. }, status=status.HTTP_200_OK)
  1630. except ProductAttribute.DoesNotExist:
  1631. return Response({
  1632. "error": f"Attribute '{attribute_name}' not found for product type '{product_type_name}'"
  1633. }, status=status.HTTP_404_NOT_FOUND)
  1634. else:
  1635. # Delete entire product type
  1636. product_type.delete()
  1637. return Response({
  1638. "message": f"Product type '{product_type_name}' and all its attributes deleted successfully"
  1639. }, status=status.HTTP_200_OK)
  1640. except Exception as e:
  1641. return Response({"error": str(e)}, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
  1642. class ProductTypeListView(APIView):
  1643. """
  1644. GET API to list all product types (only names).
  1645. """
  1646. def get(self, request):
  1647. product_types = ProductType.objects.values_list('name', flat=True)
  1648. return Response({"product_types": list(product_types)}, status=status.HTTP_200_OK)
  1649. class ProductAttributeValueView(APIView):
  1650. """
  1651. API to manage manually entered original attribute values.
  1652. GET: Retrieve all attribute values for a product
  1653. POST: Create or update attribute values for a product
  1654. DELETE: Delete attribute values
  1655. """
  1656. def get(self, request):
  1657. """
  1658. Get original attribute values for a specific product or all products.
  1659. Query params: item_id (optional)
  1660. """
  1661. item_id = request.query_params.get('item_id')
  1662. if item_id:
  1663. try:
  1664. product = Product.objects.get(item_id=item_id)
  1665. values = ProductAttributeValue.objects.filter(product=product)
  1666. serializer = ProductAttributeValueSerializer(values, many=True)
  1667. return Response({
  1668. "item_id": item_id,
  1669. "attributes": serializer.data
  1670. }, status=status.HTTP_200_OK)
  1671. except Product.DoesNotExist:
  1672. return Response({
  1673. "error": f"Product with item_id '{item_id}' not found"
  1674. }, status=status.HTTP_404_NOT_FOUND)
  1675. else:
  1676. # Return all attribute values grouped by product
  1677. values = ProductAttributeValue.objects.all().select_related('product')
  1678. serializer = ProductAttributeValueSerializer(values, many=True)
  1679. return Response(serializer.data, status=status.HTTP_200_OK)
  1680. def post(self, request):
  1681. """
  1682. Create or update original attribute value for a product.
  1683. Expected payload:
  1684. {
  1685. "item_id": "3217373735",
  1686. "attribute_name": "Clothing Neck Style",
  1687. "original_value": "V-Neck Square"
  1688. }
  1689. """
  1690. serializer = ProductAttributeValueInputSerializer(data=request.data)
  1691. if not serializer.is_valid():
  1692. return Response({"error": serializer.errors}, status=status.HTTP_400_BAD_REQUEST)
  1693. validated_data = serializer.validated_data
  1694. item_id = validated_data['item_id']
  1695. attribute_name = validated_data['attribute_name']
  1696. original_value = validated_data['original_value']
  1697. try:
  1698. product = Product.objects.get(item_id=item_id)
  1699. except Product.DoesNotExist:
  1700. return Response({
  1701. "error": f"Product with item_id '{item_id}' not found"
  1702. }, status=status.HTTP_404_NOT_FOUND)
  1703. # Create or update the attribute value
  1704. attr_value, created = ProductAttributeValue.objects.update_or_create(
  1705. product=product,
  1706. attribute_name=attribute_name,
  1707. defaults={'original_value': original_value}
  1708. )
  1709. response_serializer = ProductAttributeValueSerializer(attr_value)
  1710. return Response({
  1711. "message": "Attribute value created" if created else "Attribute value updated",
  1712. "data": response_serializer.data
  1713. }, status=status.HTTP_201_CREATED if created else status.HTTP_200_OK)
  1714. def delete(self, request):
  1715. """
  1716. Delete original attribute value(s).
  1717. Expected payload:
  1718. {
  1719. "item_id": "3217373735",
  1720. "attribute_name": "Clothing Neck Style" # Optional, if not provided deletes all for product
  1721. }
  1722. """
  1723. item_id = request.data.get('item_id')
  1724. attribute_name = request.data.get('attribute_name')
  1725. if not item_id:
  1726. return Response({
  1727. "error": "item_id is required"
  1728. }, status=status.HTTP_400_BAD_REQUEST)
  1729. try:
  1730. product = Product.objects.get(item_id=item_id)
  1731. except Product.DoesNotExist:
  1732. return Response({
  1733. "error": f"Product with item_id '{item_id}' not found"
  1734. }, status=status.HTTP_404_NOT_FOUND)
  1735. if attribute_name:
  1736. # Delete specific attribute
  1737. deleted_count, _ = ProductAttributeValue.objects.filter(
  1738. product=product,
  1739. attribute_name=attribute_name
  1740. ).delete()
  1741. if deleted_count == 0:
  1742. return Response({
  1743. "error": f"Attribute '{attribute_name}' not found for product '{item_id}'"
  1744. }, status=status.HTTP_404_NOT_FOUND)
  1745. return Response({
  1746. "message": f"Attribute '{attribute_name}' deleted successfully"
  1747. }, status=status.HTTP_200_OK)
  1748. else:
  1749. # Delete all attributes for product
  1750. deleted_count, _ = ProductAttributeValue.objects.filter(product=product).delete()
  1751. return Response({
  1752. "message": f"Deleted {deleted_count} attribute(s) for product '{item_id}'"
  1753. }, status=status.HTTP_200_OK)
  1754. class BulkProductAttributeValueView(APIView):
  1755. """
  1756. API for bulk operations on original attribute values.
  1757. POST: Create/update multiple attribute values at once
  1758. """
  1759. def post(self, request):
  1760. """
  1761. Bulk create or update attribute values for multiple products.
  1762. Expected payload:
  1763. {
  1764. "products": [
  1765. {
  1766. "item_id": "3217373735",
  1767. "attributes": {
  1768. "Clothing Neck Style": "V-Neck Square",
  1769. "Condition": "New with tags"
  1770. }
  1771. },
  1772. {
  1773. "item_id": "1234567890",
  1774. "attributes": {
  1775. "Material": "Cotton",
  1776. "Size": "L"
  1777. }
  1778. }
  1779. ]
  1780. }
  1781. """
  1782. products_data = request.data.get('products', [])
  1783. if not products_data:
  1784. return Response({
  1785. "error": "products list is required"
  1786. }, status=status.HTTP_400_BAD_REQUEST)
  1787. results = []
  1788. successful = 0
  1789. failed = 0
  1790. with transaction.atomic():
  1791. for product_data in products_data:
  1792. serializer = BulkProductAttributeValueSerializer(data=product_data)
  1793. if not serializer.is_valid():
  1794. failed += 1
  1795. results.append({
  1796. "item_id": product_data.get('item_id'),
  1797. "status": "failed",
  1798. "error": serializer.errors
  1799. })
  1800. continue
  1801. validated_data = serializer.validated_data
  1802. item_id = validated_data['item_id']
  1803. attributes = validated_data['attributes']
  1804. try:
  1805. product = Product.objects.get(item_id=item_id)
  1806. created_count = 0
  1807. updated_count = 0
  1808. for attr_name, original_value in attributes.items():
  1809. _, created = ProductAttributeValue.objects.update_or_create(
  1810. product=product,
  1811. attribute_name=attr_name,
  1812. defaults={'original_value': original_value}
  1813. )
  1814. if created:
  1815. created_count += 1
  1816. else:
  1817. updated_count += 1
  1818. successful += 1
  1819. results.append({
  1820. "item_id": item_id,
  1821. "status": "success",
  1822. "created": created_count,
  1823. "updated": updated_count
  1824. })
  1825. except Product.DoesNotExist:
  1826. failed += 1
  1827. results.append({
  1828. "item_id": item_id,
  1829. "status": "failed",
  1830. "error": f"Product not found"
  1831. })
  1832. return Response({
  1833. "results": results,
  1834. "total_products": len(products_data),
  1835. "successful": successful,
  1836. "failed": failed
  1837. }, status=status.HTTP_200_OK)
  1838. class ProductListWithAttributesView(APIView):
  1839. """
  1840. GET API to list all products with their original attribute values.
  1841. """
  1842. def get(self, request):
  1843. item_id = request.query_params.get('item_id')
  1844. if item_id:
  1845. try:
  1846. product = Product.objects.get(item_id=item_id)
  1847. serializer = ProductWithAttributesSerializer(product)
  1848. return Response(serializer.data, status=status.HTTP_200_OK)
  1849. except Product.DoesNotExist:
  1850. return Response({
  1851. "error": f"Product with item_id '{item_id}' not found"
  1852. }, status=status.HTTP_404_NOT_FOUND)
  1853. else:
  1854. products = Product.objects.all()
  1855. serializer = ProductWithAttributesSerializer(products, many=True)
  1856. return Response(serializer.data, status=status.HTTP_200_OK)
  1857. class CacheManagementView(APIView):
  1858. """
  1859. API endpoint to manage caching system.
  1860. GET: Get current cache statistics and configuration
  1861. POST: Enable/disable caching or clear caches
  1862. """
  1863. def get(self, request):
  1864. """
  1865. Get current cache configuration and statistics.
  1866. """
  1867. config = cache_config.get_cache_config()
  1868. stats = ProductAttributeService.get_cache_stats()
  1869. return Response({
  1870. "configuration": config,
  1871. "statistics": stats,
  1872. "message": "Cache status retrieved successfully"
  1873. }, status=status.HTTP_200_OK)
  1874. def post(self, request):
  1875. """
  1876. Manage cache settings.
  1877. Expected payload examples:
  1878. 1. Enable/disable caching:
  1879. {
  1880. "action": "toggle",
  1881. "enable": true // or false
  1882. }
  1883. 2. Clear all caches:
  1884. {
  1885. "action": "clear"
  1886. }
  1887. 3. Clear specific cache:
  1888. {
  1889. "action": "clear",
  1890. "cache_type": "embedding" // or "attribute" or "clip"
  1891. }
  1892. 4. Get statistics:
  1893. {
  1894. "action": "stats"
  1895. }
  1896. """
  1897. action = request.data.get('action')
  1898. if not action:
  1899. return Response({
  1900. "error": "action is required",
  1901. "valid_actions": ["toggle", "clear", "stats"]
  1902. }, status=status.HTTP_400_BAD_REQUEST)
  1903. # Toggle caching on/off
  1904. if action == "toggle":
  1905. enable = request.data.get('enable')
  1906. if enable is None:
  1907. return Response({
  1908. "error": "enable parameter is required (true/false)"
  1909. }, status=status.HTTP_400_BAD_REQUEST)
  1910. # Update the cache configuration
  1911. cache_config.ENABLE_CACHING = bool(enable)
  1912. cache_config.ENABLE_ATTRIBUTE_EXTRACTION_CACHE = bool(enable)
  1913. cache_config.ENABLE_EMBEDDING_CACHE = bool(enable)
  1914. cache_config.ENABLE_CLIP_MODEL_CACHE = bool(enable)
  1915. status_msg = "enabled" if enable else "disabled"
  1916. return Response({
  1917. "message": f"Caching has been {status_msg}",
  1918. "configuration": cache_config.get_cache_config()
  1919. }, status=status.HTTP_200_OK)
  1920. # Clear caches
  1921. elif action == "clear":
  1922. cache_type = request.data.get('cache_type', 'all')
  1923. if cache_type == 'all':
  1924. ProductAttributeService.clear_all_caches()
  1925. VisualProcessingService.clear_clip_cache()
  1926. message = "All caches cleared successfully"
  1927. elif cache_type == 'embedding':
  1928. from .services import EmbeddingCache
  1929. EmbeddingCache.clear()
  1930. message = "Embedding cache cleared successfully"
  1931. elif cache_type == 'attribute':
  1932. from .services import SimpleCache
  1933. SimpleCache.clear()
  1934. message = "Attribute extraction cache cleared successfully"
  1935. elif cache_type == 'clip':
  1936. VisualProcessingService.clear_clip_cache()
  1937. message = "CLIP model cache cleared successfully"
  1938. else:
  1939. return Response({
  1940. "error": f"Invalid cache_type: {cache_type}",
  1941. "valid_types": ["all", "embedding", "attribute", "clip"]
  1942. }, status=status.HTTP_400_BAD_REQUEST)
  1943. return Response({
  1944. "message": message,
  1945. "statistics": ProductAttributeService.get_cache_stats()
  1946. }, status=status.HTTP_200_OK)
  1947. # Get statistics
  1948. elif action == "stats":
  1949. stats = ProductAttributeService.get_cache_stats()
  1950. config = cache_config.get_cache_config()
  1951. return Response({
  1952. "configuration": config,
  1953. "statistics": stats
  1954. }, status=status.HTTP_200_OK)
  1955. else:
  1956. return Response({
  1957. "error": f"Invalid action: {action}",
  1958. "valid_actions": ["toggle", "clear", "stats"]
  1959. }, status=status.HTTP_400_BAD_REQUEST)
  1960. class CacheStatsView(APIView):
  1961. """
  1962. Simple GET endpoint to retrieve cache statistics.
  1963. """
  1964. def get(self, request):
  1965. """Get current cache statistics."""
  1966. stats = ProductAttributeService.get_cache_stats()
  1967. config = cache_config.get_cache_config()
  1968. return Response({
  1969. "cache_enabled": config["master_cache_enabled"],
  1970. "statistics": stats,
  1971. "timestamp": datetime.now().isoformat()
  1972. }, status=status.HTTP_200_OK)