summaryrefslogtreecommitdiff
path: root/indoteknik_custom/models/keywords.py
diff options
context:
space:
mode:
Diffstat (limited to 'indoteknik_custom/models/keywords.py')
-rw-r--r--indoteknik_custom/models/keywords.py253
1 file changed, 253 insertions, 0 deletions
diff --git a/indoteknik_custom/models/keywords.py b/indoteknik_custom/models/keywords.py
new file mode 100644
index 00000000..3fa9dd72
--- /dev/null
+++ b/indoteknik_custom/models/keywords.py
@@ -0,0 +1,253 @@
+from itertools import product
+from multiprocessing import Condition
+from odoo import fields, models, api, tools, _
+import logging
+import re
+import pysolr
+from odoo.exceptions import UserError
+import base64
+import xlrd, xlwt
+import io
+
+
# Module-level logger for this model.
_logger = logging.getLogger(__name__)
# Shared pysolr client for the "searchkey" core; always_commit makes every
# add() immediately visible to searches.
# NOTE(review): the endpoint is hard-coded to an internal IP — consider moving
# it to ir.config_parameter so dev/staging/prod can point at different cores.
solr = pysolr.Solr('http://10.148.0.5:8983/solr/searchkey/', always_commit=True, timeout=30)
# Local development endpoint (kept for convenience):
# solr = pysolr.Solr('http://127.0.0.1:8983/solr/searchkey/', always_commit=True, timeout=30)
+
class Keywords(models.Model):
    """Website search keyword.

    Maps a free-text keyword plus a public product category to a set of
    product variants. Each keyword is published on the website under
    ``https://indoteknik.com/searchkey/<slug>`` and mirrored into the
    Apache Solr core ``searchkey`` either through the ``apache.solr.queue``
    pipeline (``solr_flag`` lifecycle: 2 = needs sync, 1 = queued,
    0 = in sync) or manually via :meth:`sync_solr`.
    """
    _name = 'keywords'
    _description = 'Search Keywords'  # added: Odoo warns when missing
    _order = 'id desc'

    category_id = fields.Many2one('product.public.category', string='Category', required=True, help="Category to filter products when generating products for this keyword and to throw to solr")
    keywords = fields.Char('Keywords', required=True)
    product_ids = fields.Many2many(
        'product.product',
        'keywords_product_rel',
        'keyword_id',
        'product_id',
        string='Products'
    )
    name = fields.Char('Name', compute="_compute_name")
    skip = fields.Boolean('Skip Generate Product', default=False, help="If checked, the system will not generate products for this keyword")
    url = fields.Char('Website URL', compute="_compute_url", help="Generated website url based on keywords")
    sum = fields.Integer('Total Product', compute="_compute_total_product", readonly=True, help="Total products found for this keyword including variants")
    solr_flag = fields.Integer(string='Solr Flag', default=0, help="0=no sync needed, 2=needs sync, 1=queued")

    @api.depends('product_ids')
    def _compute_total_product(self):
        """Count the product variants linked to the keyword."""
        for record in self:
            record.sum = len(record.product_ids)

    @api.depends('keywords')
    def _compute_url(self):
        """Build the public search URL by slugifying the keyword."""
        prefix = "https://indoteknik.com/searchkey/"
        for record in self:
            if record.keywords:
                # Collapse every run of non-alphanumeric characters into a
                # single dash, then trim leading/trailing dashes.
                slug = re.sub(r'[^a-zA-Z0-9]+', '-', record.keywords.strip().lower())
                slug = slug.strip('-')
                record.url = prefix + slug
            else:
                record.url = False

    @api.depends('keywords')
    def _compute_name(self):
        """Display name mirrors the keyword.

        Fix: the original had no ``@api.depends`` and read the still-being-
        computed ``name`` field, assigning only when it was falsy — a compute
        method must assign its field on every record, and the dependency is
        needed for cache invalidation.
        """
        for record in self:
            record.name = record.keywords

    @api.constrains('keywords', 'category_id')
    def check_already_exist(self):
        """Reject duplicate (keywords, category) pairs."""
        for record in self:
            # search_count is the idiomatic (and cheaper) existence test.
            duplicate = self.search_count([
                ('keywords', '=', record.keywords),
                ('category_id', '=', record.category_id.id),
                ('id', '!=', record.id),
            ])
            if duplicate:
                raise UserError("Tidak bisa create karena keywords sudah dipakai")

    def copy(self, default=None):
        """Duplicating a keyword is forbidden (it would break uniqueness).

        Fix: accept the standard ``default`` argument so the override stays
        signature-compatible with ``models.Model.copy`` — callers passing
        ``default`` previously got a TypeError instead of the intended error.
        """
        raise UserError("Duplicate Record not allowed")

    def clear_products(self):
        """Detach all products from the keyword (the products themselves
        are untouched; (5, 0, 0) only clears the m2m relation)."""
        for record in self:
            record.product_ids = [(5, 0, 0)]

    def generate_products(self):
        """Populate ``product_ids`` by matching the keyword against products.

        Matches active variants of well-rated templates
        (``product_rating >= 8``) whose name or website description contains
        the keyword (case-insensitive), optionally restricted to the
        keyword's category subtree.

        :raises UserError: when no product matches a keyword.
        """
        for record in self:
            # Respect the manual opt-out and skip empty keywords.
            if not record.keywords or record.skip:
                continue

            keyword = f"%{record.keywords.strip()}%"

            # Parameterized query: the keyword only ever travels through %s
            # placeholders, never string interpolation.
            # (A former published-only filter was deliberately dropped:
            #  AND (pt.unpublished IS FALSE OR pt.unpublished IS NULL))
            sql = """
            SELECT DISTINCT pp.id
            FROM product_product pp
            JOIN product_template pt ON pt.id = pp.product_tmpl_id
            JOIN product_public_category_product_template_rel rel
                ON rel.product_template_id = pt.id
            WHERE
                pt.product_rating >= 8
                AND pp.active IS TRUE
                AND (
                    pt.name ILIKE %s
                    OR pt.website_description ILIKE %s
                )
            """

            params = [
                keyword,
                keyword,
            ]

            if record.category_id:
                # Include the whole subtree below the configured category.
                child_categs = self.env['product.public.category'].search([
                    ('id', 'child_of', record.category_id.id)
                ])
                sql += " AND rel.product_public_category_id = ANY(%s)"
                params.append(child_categs.ids)

            self.env.cr.execute(sql, params)
            product_ids = [row[0] for row in self.env.cr.fetchall()]

            if not product_ids:
                raise UserError(
                    f"Tidak berhasil menemukan barang untuk keyword '{record.keywords}'"
                )

            # skip_generate context guards against write() side effects
            # re-triggering generation elsewhere.
            record.with_context(skip_generate=True).write({
                'product_ids': [(6, 0, product_ids)]
            })

            _logger.info(
                "Product Found: Found %s products for keyword '%s'",
                len(product_ids),
                record.keywords
            )

    @api.onchange('keywords', 'category_id', 'product_ids')
    def _onchange_solr_flag(self):
        """Set solr_flag=2 when tracked fields change to trigger queue sync.

        Only records that actually have products are flagged — empty
        keywords have nothing to publish.
        """
        for record in self:
            if record.product_ids:
                record.solr_flag = 2

    def solr_flag_to_queue(self, limit=500):
        """Cron entry point: enqueue keywords flagged 2 into apache.solr.queue.

        Each queued keyword is re-flagged to 1 so it is not enqueued twice;
        the queue callback resets the flag to 0 after a successful push.

        :param limit: max number of keywords processed per run.
        """
        keywords = self.search([('solr_flag', '=', 2)], limit=limit)
        queue_obj = self.env['apache.solr.queue']

        for keyword in keywords:
            # create_unique avoids duplicate queue entries for the same record.
            queue_obj.create_unique({
                'res_model': 'keywords',
                'res_id': keyword.id,
                'function_name': '_sync_keywords_queue_callback'
            })
            keyword.solr_flag = 1

        if keywords:
            _logger.info(
                'Queued %s keywords for Solr synchronization',
                len(keywords)
            )

        return True

    def _sync_keywords_queue_callback(self):
        """Queue callback: push each keyword document into Solr.

        Keywords without products are skipped; failures are logged per
        record so one bad document does not block the batch. Successfully
        pushed records get solr_flag reset to 0.
        """
        success_keywords = self.browse()

        for keyword in self:
            if not keyword.product_ids:
                _logger.info(
                    'Skipping Solr sync for keyword "%s" - no products found',
                    keyword.keywords
                )
                continue

            # Dashed, lower-cased key matching the website slug convention.
            searchkey = (keyword.keywords or '').strip().lower().replace(' ', '-')

            try:
                doc = {
                    'id': keyword.id,
                    'category_id_i': keyword.category_id.id,
                    'keywords_s': searchkey,
                    'url_s': keyword.url,
                    # Solr stores template ids, not variant ids.
                    'product_ids_is': [p.product_tmpl_id.id for p in keyword.product_ids],
                }

                solr.add([doc])

                success_keywords |= keyword

            except Exception as e:
                _logger.error(
                    "Solr sync failed for keyword ID %s: %s",
                    keyword.id, e
                )

        if success_keywords:
            # Single batched write instead of one write per record.
            success_keywords.write({'solr_flag': 0})

        return True

    def sync_solr(self):
        """Manual sync method for active_ids context (backward compatibility).

        Builds one Solr document per selected keyword (skipping those with
        no products) and pushes them in a single solr.add() call.
        """
        active_ids = self.env.context.get('active_ids', [])
        if not active_ids:
            _logger.warning("No active_ids found, nothing to sync")
            return True

        keywords = self.browse(active_ids)

        documents = []
        for keyword in keywords:
            # Skip syncing if product count is 0
            if not keyword.product_ids:
                _logger.info('Skipping Solr sync for keyword "%s" - no products found', keyword.keywords)
                continue

            searchkey = (keyword.keywords or '').strip().lower().replace(' ', '-')
            # Fix: initialize doc so the error handler below cannot hit a
            # NameError when the dict construction itself raises.
            doc = None
            try:
                doc = {
                    'id': keyword.id,
                    'category_id_i': keyword.category_id.id,
                    'keywords_s': searchkey,
                    'url_s': keyword.url,
                    'product_ids_is': [p.product_tmpl_id.id for p in keyword.product_ids],
                }
                documents.append(doc)
            except Exception as e:
                _logger.error('failed %s', e)
                _logger.error('doc data: %s', doc)

        if documents:
            solr.add(documents)

        return True

    @api.model
    def create(self, vals):
        """Standard create; uniqueness is enforced by check_already_exist
        (constraint) and product generation is triggered explicitly."""
        return super().create(vals)

    def write(self, vals):
        """Write override: flag records for Solr re-sync when any tracked
        field changes, provided the record has products to publish."""
        result = super().write(vals)

        tracked_fields = ['keywords', 'category_id', 'product_ids', 'skip', 'name']
        needs_sync = any(field in vals for field in tracked_fields)
        if needs_sync:
            for record in self:
                # Only flag for sync if there are products
                if record.product_ids:
                    record.solr_flag = 2

        return result