summaryrefslogtreecommitdiff
path: root/src
diff options
context:
space:
mode:
authorMqdd <ahmadmiqdad27@gmail.com>2026-02-19 16:51:50 +0700
committerMqdd <ahmadmiqdad27@gmail.com>2026-02-19 16:51:50 +0700
commit6aecb5c1a2ee384b8ea2847a543142bfaa9c48f2 (patch)
tree130ef14de3f003aab37e003b1ba15dcda166eff4 /src
parent7b79adcbcf0ef7c791e0c679ca946243c9dde7f7 (diff)
<Miqdad> remove console log (cr_renca_keyword)
Diffstat (limited to 'src')
-rw-r--r-- src/pages/api/shop/search.js 16
1 file changed, 8 insertions, 8 deletions
diff --git a/src/pages/api/shop/search.js b/src/pages/api/shop/search.js
index 8954446a..f7220568 100644
--- a/src/pages/api/shop/search.js
+++ b/src/pages/api/shop/search.js
@@ -421,7 +421,7 @@ export default async function handler(req, res) {
return res.status(400).json({ error: 'No product IDs provided' });
}
- console.log(`[SEARCHKEY] Processing ${ids.length} product IDs`);
+ // console.log(`[SEARCHKEY] Processing ${ids.length} product IDs`);
// If less than 100 IDs, use single query
if (ids.length <= 100) {
@@ -440,7 +440,7 @@ export default async function handler(req, res) {
'/solr/product/select?' +
strictQuery.join('&');
- console.log('[SEARCHKEY SINGLE QUERY]', solrUrl);
+ // console.log('[SEARCHKEY SINGLE QUERY]', solrUrl);
const result = await axios(solrUrl);
result.data.response.products = productMappingSolr(
@@ -456,9 +456,9 @@ export default async function handler(req, res) {
// Batch large ID arrays into chunks of 100
const idChunks = chunkArray(ids, 100);
- console.log(
- `[SEARCHKEY BATCH] Splitting ${ids.length} IDs into ${idChunks.length} chunks`,
- );
+ // console.log(
+ // `[SEARCHKEY BATCH] Splitting ${ids.length} IDs into ${idChunks.length} chunks`,
+ // );
// Execute all chunk queries in parallel
const batchQueries = idChunks.map((chunk) => {
@@ -494,9 +494,9 @@ export default async function handler(req, res) {
batchResults.map((r) => r.data.response.docs),
);
- console.log(
- `[SEARCHKEY MERGE] Merged ${allDocs.length} unique documents from ${batchResults.length} chunks`,
- );
+ // console.log(
+ // `[SEARCHKEY MERGE] Merged ${allDocs.length} unique documents from ${batchResults.length} chunks`,
+ // );
// Apply pagination on merged results
const paginatedDocs = allDocs.slice(offset, offset + limitNum);