summary refs log tree commit diff
path: root/fixco_custom/models/upload_ginee.py
diff options
context:
space:
mode:
Diffstat (limited to 'fixco_custom/models/upload_ginee.py')
-rw-r--r-- fixco_custom/models/upload_ginee.py | 146
1 files changed, 86 insertions, 60 deletions
diff --git a/fixco_custom/models/upload_ginee.py b/fixco_custom/models/upload_ginee.py
index b211709..19aa965 100644
--- a/fixco_custom/models/upload_ginee.py
+++ b/fixco_custom/models/upload_ginee.py
@@ -204,43 +204,53 @@ class UploadGineeLine(models.Model):
def _process_grouped_blibli_orders(self, lines):
"""Process a group of BLIBLI orders with the same invoice prefix"""
- order_ids = [line.order_id for line in lines if line.order_id]
-
+
+ order_ids = [l.order_id for l in lines if l.order_id]
if not order_ids:
raise UserError(_('Order ID is empty for one or more records in group!'))
-
- # Check if any of these orders already exist
- existing_detail = self.env['detail.order'].search([
- ('detail_order', 'ilike', order_ids[0])
- ], limit=1)
-
+
+ # ===== 1. Fast duplicate check (still same behavior) =====
+ existing_detail = self.env['detail.order'].search(
+ [('detail_order', 'ilike', order_ids[0])],
+ limit=1
+ )
if existing_detail:
return existing_detail
-
- # Call API with all order IDs in the group
- data = lines[0]._call_api(BATCH_GET_URI, {"orderIds": order_ids})
-
- # Combine items from all orders in the group
- combined_items = []
- for order_data in data.get('data', []):
- combined_items.extend(order_data.get('items', []))
-
- # Create a modified json_data structure that includes all items
+
+ # ===== 2. Single API call =====
+ data = lines[0]._call_api(
+ BATCH_GET_URI,
+ {"orderIds": order_ids}
+ )
+
+ orders_data = data.get('data', [])
+ if not orders_data:
+ raise UserError(_('No data returned from BLIBLI API'))
+
+ # ===== 3. Combine items (lebih ringkas) =====
+ combined_items = [
+ item
+ for order in orders_data
+ for item in order.get('items', [])
+ ]
+
+ base_order = orders_data[0]
+
+ # ===== 4. Build grouped payload =====
combined_json_data = {
'data': [{
- **data.get('data', [{}])[0], # Keep all original fields from first order
- 'items': combined_items, # Combined items from all orders
- 'shopId': data.get('data', [{}])[0].get('shopId'),
- 'externalOrderId': ', '.join([line.invoice_marketplace for line in lines]),
- 'orderId': ', '.join(order_ids), # Mark as grouped
+ **base_order,
+ 'items': combined_items,
+ 'externalOrderId': ', '.join(lines.mapped('invoice_marketplace')),
+ 'orderId': ', '.join(order_ids),
}]
}
-
- detail_order = self.env['detail.order'].create({
+
+ return self.env['detail.order'].create({
'detail_order': json.dumps(combined_json_data, indent=4),
'source': 'manual',
})
- return detail_order
+
def _sign_request(self, uri):
"""Membuat tanda tangan sesuai format yang berhasil"""
@@ -273,69 +283,84 @@ class UploadGineeLine(models.Model):
return response.json()
def create_so_and_detail_order(self):
- # First group BLIBLI orders by their invoice prefix
grouped_lines = {}
- for rec in self:
- if rec.detail_order_id:
- continue
+
+ # ===== 1. Grouping (lebih rapi & cepat) =====
+ for rec in self.filtered(lambda r: not r.detail_order_id):
if rec.upload_ginee_id.upload_type == 'blibli' and '-' in rec.invoice_marketplace:
- prefix = rec.invoice_marketplace.split('-')[0]
- if prefix not in grouped_lines:
- grouped_lines[prefix] = []
- grouped_lines[prefix].append(rec)
+ key = rec.invoice_marketplace.split('-')[0]
else:
- # For non-BLIBLI or BLIBLI without dash, process individually
- grouped_lines[rec.id] = [rec]
-
- # Process each group
- for group_key, lines in grouped_lines.items():
+ key = rec.id
+ grouped_lines.setdefault(key, []).append(rec)
+
+ # ===== 2. Preload sale.order (hindari query berulang) =====
+ invoice_list = [
+ line.invoice_marketplace
+ for lines in grouped_lines.values()
+ for line in lines
+ if len(lines) == 1
+ ]
+
+ existing_so_map = {}
+ if invoice_list:
+ so_records = self.env['sale.order'].search([
+ ('invoice_mp', 'in', invoice_list)
+ ])
+ for so in so_records:
+ existing_so_map.setdefault(so.invoice_mp, []).append(so.name)
+
+ # ===== 3. Process per group =====
+ for _, lines in grouped_lines.items():
try:
+ # ===== GROUPED BLIBLI =====
if len(lines) > 1:
- # Process grouped BLIBLI orders
detail_order = self._process_grouped_blibli_orders(lines)
-
- # Update all lines in the group
+ detail_order.execute_queue_detail()
+
for line in lines:
- line.update({
+ line.write({
'message_error': 'Success (grouped)',
'detail_order_id': detail_order.id
})
- detail_order.execute_queue_detail()
+
+ # ===== SINGLE LINE =====
else:
- # Process single line (non-grouped)
line = lines[0]
- so_exist = self.env['sale.order'].search(
- [('invoice_mp', 'ilike', line.invoice_marketplace)]
- )
- if so_exist:
+ if line.invoice_marketplace in existing_so_map:
raise UserError(_(
"Invoice Marketplace %s sudah terdaftar di Sale Order: %s"
) % (
line.invoice_marketplace,
- ', '.join(so_exist.mapped('name'))
+ ', '.join(existing_so_map[line.invoice_marketplace])
))
if not line.order_id:
raise UserError(_('Order ID is empty!'))
-
-
- data = line._call_api(BATCH_GET_URI, {"orderIds": [line.order_id]})
+
+ data = line._call_api(
+ BATCH_GET_URI,
+ {"orderIds": [line.order_id]}
+ )
+
detail_order = self.env['detail.order'].create({
'detail_order': json.dumps(data, indent=4),
'source': 'manual',
})
detail_order.execute_queue_detail()
-
- line.update({
+
+ line.write({
'message_error': 'Success',
'detail_order_id': detail_order.id
})
-
+
except Exception as e:
- # Update all lines in group with error if any
- for line in lines:
- line.message_error = str(e)
+ self.env['upload.ginee.line'].browse(
+ [l.id for l in lines]
+ ).write({
+ 'message_error': str(e)
+ })
+
def get_order_id(self):
for rec in self:
@@ -366,4 +391,5 @@ class UploadGineeLine(models.Model):
raise UserError(_("No orders found for invoice: %s") % rec.invoice_marketplace)
except Exception as e:
- rec.message_error = str(e) \ No newline at end of file
+ rec.message_error = str(e)
+ raise \ No newline at end of file