1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
|
from itertools import product
from multiprocessing import Condition  # NOTE(review): appears unused here — confirm against rest of module before removing
from odoo import fields, models, api, tools, _
import logging
import re
import pysolr
from odoo.exceptions import UserError
import base64  # NOTE(review): base64/xlrd/xlwt/io look unused in this chunk — may serve code outside this view
import xlrd, xlwt
import io
_logger = logging.getLogger(__name__)
# Module-level Solr client shared by all sync methods below.
# NOTE(review): host is hardcoded — presumably the production core; consider
# moving the URL into ir.config_parameter so environments can differ. TODO confirm.
solr = pysolr.Solr('http://10.148.0.5:8983/solr/searchkey/', always_commit=True, timeout=30)
# solr = pysolr.Solr('http://127.0.0.1:8983/solr/searchkey/', always_commit=True, timeout=30)
class Keywords(models.Model):
    """Search keyword mapped to a set of products, synchronized to Apache Solr.

    Each record couples a keyword phrase with a public e-commerce category.
    Matching products are collected with raw SQL (:meth:`generate_products`)
    and pushed to the external Solr core either through the
    ``apache.solr.queue`` model (``solr_flag`` lifecycle: 2 = needs sync,
    1 = queued, 0 = synced) or manually via :meth:`sync_solr`.
    """
    _name = 'keywords'
    _order = 'id desc'

    category_id = fields.Many2one('product.public.category', string='Category', required=True, help="Category to filter products when generating products for this keyword and to throw to solr")
    keywords = fields.Char('Keywords', required=True)
    product_ids = fields.Many2many(
        'product.product',
        'keywords_product_rel',
        'keyword_id',
        'product_id',
        string='Products'
    )
    name = fields.Char('Name', compute="_compute_name")
    skip = fields.Boolean('Skip Generate Product', default=False, help="If checked, the system will not generate products for this keyword")
    url = fields.Char('Website URL', compute="_compute_url", help="Generated website url based on keywords")
    # NOTE: field name shadows the builtin ``sum``; kept for backward
    # compatibility with existing views and stored data.
    sum = fields.Integer('Total Product', compute="_compute_total_product", readonly=True, help="Total products found for this keyword including variants")
    solr_flag = fields.Integer(string='Solr Flag', default=0, help="0=no sync needed, 2=needs sync, 1=queued")

    @api.depends('product_ids')
    def _compute_total_product(self):
        """Count the product variants currently linked to the keyword."""
        for record in self:
            record.sum = len(record.product_ids)

    @api.depends('keywords')
    def _compute_url(self):
        """Build the public ``/searchkey/`` URL slug from the keyword phrase."""
        prefix = "https://indoteknik.com/searchkey/"
        for record in self:
            if record.keywords:
                # Collapse every run of non-alphanumeric characters into one
                # dash, then trim leading/trailing dashes.
                slug = re.sub(r'[^a-zA-Z0-9]+', '-', record.keywords.strip().lower())
                slug = slug.strip('-')
                record.url = prefix + slug
            else:
                record.url = False

    @api.depends('keywords')
    def _compute_name(self):
        """Mirror the keyword phrase as the record's display name.

        Fix: the original read ``record.name`` inside its own compute (always
        falsy while the field is being computed) and declared no dependency,
        so the assignment is now unconditional and tracked on ``keywords``.
        """
        for record in self:
            record.name = record.keywords

    @api.constrains('keywords', 'category_id')
    def check_already_exist(self):
        """Forbid two keyword records with the same keyword/category pair.

        :raises UserError: when a duplicate pair already exists.
        """
        for record in self:
            duplicate = self.search([
                ('keywords', '=', record.keywords),
                ('category_id', '=', record.category_id.id),
                ('id', '!=', record.id),
            ], limit=1)
            if duplicate:
                raise UserError("Tidak bisa create karena keywords sudah dipakai")

    def copy(self, default=None):
        """Duplication of keyword records is forbidden.

        ``default`` is accepted to stay compatible with the base
        ``Model.copy(default=None)`` signature (fix: the original override
        raised ``TypeError`` instead of the intended ``UserError`` whenever
        the standard keyword argument was passed, e.g. from the UI).
        """
        raise UserError("Duplicate Record not allowed")

    def clear_products(self):
        """Unlink every product from the keyword (command 5 clears the m2m)."""
        for record in self:
            record.product_ids = [(5, 0, 0)]

    def generate_products(self):
        """Populate ``product_ids`` from raw SQL matching the keyword.

        Searches product name and website description (``ILIKE``, wildcarded
        on both sides) for active variants with ``product_rating >= 8``, and
        optionally restricts the match to the record's category subtree.
        Records with an empty keyword or with ``skip`` set are left untouched.

        :raises UserError: when no product matches the keyword.
        """
        for record in self:
            if not record.keywords or record.skip:
                continue
            keyword = f"%{record.keywords.strip()}%"
            # AND (pt.unpublished IS FALSE OR pt.unpublished IS NULL)
            sql = """
                SELECT DISTINCT pp.id
                FROM product_product pp
                JOIN product_template pt ON pt.id = pp.product_tmpl_id
                JOIN product_public_category_product_template_rel rel
                    ON rel.product_template_id = pt.id
                WHERE
                    pt.product_rating >= 8
                    AND pp.active IS TRUE
                    AND (
                        pt.name ILIKE %s
                        OR pt.website_description ILIKE %s
                    )
            """
            params = [keyword, keyword]
            if record.category_id:
                # Include the selected category itself plus all descendants.
                child_categs = self.env['product.public.category'].search([
                    ('id', 'child_of', record.category_id.id)
                ])
                sql += " AND rel.product_public_category_id = ANY(%s)"
                params.append(child_categs.ids)
            self.env.cr.execute(sql, params)
            product_ids = [row[0] for row in self.env.cr.fetchall()]
            if not product_ids:
                raise UserError(
                    f"Tidak berhasil menemukan barang untuk keyword '{record.keywords}'"
                )
            # Command 6 replaces the entire relation in a single write.
            record.with_context(skip_generate=True).write({
                'product_ids': [(6, 0, product_ids)]
            })
            _logger.info(
                "Product Found: Found %s products for keyword '%s'",
                len(product_ids),
                record.keywords
            )

    @api.onchange('keywords', 'category_id', 'product_ids')
    def _onchange_solr_flag(self):
        """Set solr_flag=2 when tracked fields change to trigger queue sync"""
        for record in self:
            if record.product_ids:
                record.solr_flag = 2

    def solr_flag_to_queue(self, limit=500):
        """Find keywords with solr_flag=2 and create apache.solr.queue entries

        :param limit: maximum number of records to enqueue per run.
        """
        pending = self.search([('solr_flag', '=', 2)], limit=limit)
        queue_obj = self.env['apache.solr.queue']
        for keyword in pending:
            # create_unique is expected to avoid duplicate queue rows for the
            # same (model, record) pair.
            queue_obj.create_unique({
                'res_model': 'keywords',
                'res_id': keyword.id,
                'function_name': '_sync_keywords_queue_callback'
            })
            # Set flag to indicate queued
            keyword.solr_flag = 1
        if pending:
            _logger.info(
                'Queued %s keywords for Solr synchronization',
                len(pending)
            )
        return True

    def _prepare_solr_document(self):
        """Return the Solr document payload for a single keyword record.

        Shared by the queue callback and the manual sync so both paths emit
        identical documents.
        """
        self.ensure_one()
        searchkey = (self.keywords or '').strip().lower().replace(' ', '-')
        return {
            'id': self.id,
            'category_id_i': self.category_id.id,
            'keywords_s': searchkey,
            'url_s': self.url,
            # Solr stores template ids, not variant ids.
            'product_ids_is': [p.product_tmpl_id.id for p in self.product_ids],
        }

    def _sync_keywords_queue_callback(self):
        """Queue callback: push each keyword to Solr, reset flag on success.

        Failed records keep their flag so a later queue run retries them.
        """
        success_keywords = self.browse()
        for keyword in self:
            if not keyword.product_ids:
                _logger.info(
                    'Skipping Solr sync for keyword "%s" - no products found',
                    keyword.keywords
                )
                continue
            try:
                solr.add([keyword._prepare_solr_document()])
                success_keywords |= keyword
            except Exception as e:
                # Best-effort: log and continue with the remaining records.
                _logger.error(
                    "Solr sync failed for keyword ID %s: %s",
                    keyword.id, e
                )
        if success_keywords:
            success_keywords.write({'solr_flag': 0})
        return True

    def sync_solr(self):
        """Manual sync method for active_ids context (backward compatibility)"""
        active_ids = self.env.context.get('active_ids', [])
        if not active_ids:
            _logger.warning("No active_ids found, nothing to sync")
            return True
        documents = []
        for keyword in self.browse(active_ids):
            # Skip syncing if product count is 0
            if not keyword.product_ids:
                _logger.info('Skipping Solr sync for keyword "%s" - no products found', keyword.keywords)
                continue
            try:
                documents.append(keyword._prepare_solr_document())
            except Exception as e:
                # Fix: the original logged ``doc`` here, which is unbound when
                # the document construction itself raised (NameError inside
                # the error handler); log the record id instead.
                _logger.error('failed %s', e)
                _logger.error('keyword id: %s', keyword.id)
        if documents:
            solr.add(documents)
        return True

    @api.model
    def create(self, vals):
        """Standard create; the duplicate check runs via the constraint."""
        return super().create(vals)

    def write(self, vals):
        """Flag records for Solr re-sync whenever a tracked field changes."""
        result = super().write(vals)
        tracked_fields = ('keywords', 'category_id', 'product_ids', 'skip', 'name')
        needed_sync = any(field in vals for field in tracked_fields)
        if needed_sync:
            for record in self:
                # Only flag for sync if there are products
                if record.product_ids:
                    record.solr_flag = 2
        return result
|