summaryrefslogtreecommitdiff
path: root/addons/crm/models
diff options
context:
space:
mode:
authorstephanchrst <stephanchrst@gmail.com>2022-05-10 21:51:50 +0700
committerstephanchrst <stephanchrst@gmail.com>2022-05-10 21:51:50 +0700
commit3751379f1e9a4c215fb6eb898b4ccc67659b9ace (patch)
treea44932296ef4a9b71d5f010906253d8c53727726 /addons/crm/models
parent0a15094050bfde69a06d6eff798e9a8ddf2b8c21 (diff)
initial commit 2
Diffstat (limited to 'addons/crm/models')
-rw-r--r--addons/crm/models/__init__.py16
-rw-r--r--addons/crm/models/calendar.py55
-rw-r--r--addons/crm/models/crm_lead.py2133
-rw-r--r--addons/crm/models/crm_lead_scoring_frequency.py24
-rw-r--r--addons/crm/models/crm_lost_reason.py29
-rw-r--r--addons/crm/models/crm_recurring_plan.py19
-rw-r--r--addons/crm/models/crm_stage.py49
-rw-r--r--addons/crm/models/crm_team.py183
-rw-r--r--addons/crm/models/digest.py46
-rw-r--r--addons/crm/models/ir_config_parameter.py31
-rw-r--r--addons/crm/models/res_config_settings.py127
-rw-r--r--addons/crm/models/res_partner.py116
-rw-r--r--addons/crm/models/res_users.py11
-rw-r--r--addons/crm/models/utm.py30
14 files changed, 2869 insertions, 0 deletions
diff --git a/addons/crm/models/__init__.py b/addons/crm/models/__init__.py
new file mode 100644
index 00000000..7b1a7fdc
--- /dev/null
+++ b/addons/crm/models/__init__.py
@@ -0,0 +1,16 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from . import res_users
+from . import calendar
+from . import crm_lead
+from . import crm_lost_reason
+from . import crm_stage
+from . import crm_team
+from . import ir_config_parameter
+from . import res_config_settings
+from . import res_partner
+from . import digest
+from . import crm_lead_scoring_frequency
+from . import utm
+from . import crm_recurring_plan
diff --git a/addons/crm/models/calendar.py b/addons/crm/models/calendar.py
new file mode 100644
index 00000000..0c55f665
--- /dev/null
+++ b/addons/crm/models/calendar.py
@@ -0,0 +1,55 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models
+
+
class CalendarEvent(models.Model):
    """Extend calendar events with a link to a CRM opportunity.

    Creating a meeting from a lead's chatter (or with
    ``default_opportunity_id`` in the context) attaches the event to that
    opportunity and logs the meeting on it.
    """
    _inherit = 'calendar.event'

    @api.model
    def default_get(self, fields):
        """Propagate the opportunity from the context into the event defaults.

        When ``default_opportunity_id`` is set, the event is also attached to
        the ``crm.lead`` document (res_model / res_id) before computing the
        standard defaults.
        """
        if self.env.context.get('default_opportunity_id'):
            self = self.with_context(
                default_res_model_id=self.env.ref('crm.model_crm_lead').id,
                default_res_id=self.env.context['default_opportunity_id']
            )
        defaults = super(CalendarEvent, self).default_get(fields)

        # sync res_model / res_id to opportunity id (aka creating meeting from lead chatter)
        if 'opportunity_id' not in defaults:
            if self._is_crm_lead(defaults, self.env.context):
                defaults['opportunity_id'] = defaults.get('res_id', False) or self.env.context.get('default_res_id', False)

        return defaults

    # Opportunity the meeting belongs to; kept when the lead is deleted
    # (ondelete='set null') so the event itself survives.
    opportunity_id = fields.Many2one(
        'crm.lead', 'Opportunity', domain="[('type', '=', 'opportunity')]",
        index=True, ondelete='set null')

    def _compute_is_highlighted(self):
        """Also highlight events linked to the lead currently open
        (``active_id`` of a ``crm.lead`` action)."""
        super(CalendarEvent, self)._compute_is_highlighted()
        if self.env.context.get('active_model') == 'crm.lead':
            opportunity_id = self.env.context.get('active_id')
            for event in self:
                if event.opportunity_id.id == opportunity_id:
                    event.is_highlighted = True

    @api.model_create_multi
    def create(self, vals):
        """Create events and log a "meeting scheduled" note on the linked
        opportunity — unless the event was generated from an activity
        (which already leaves its own trace)."""
        events = super(CalendarEvent, self).create(vals)
        for event in events:
            if event.opportunity_id and not event.activity_ids:
                event.opportunity_id.log_meeting(event.name, event.start, event.duration)
        return events

    def _is_crm_lead(self, defaults, ctx=None):
        """
        This method checks if the concerned model is a CRM lead.
        The information is not always in the defaults values,
        this is why it is necessary to check the context too.
        """
        res_model = defaults.get('res_model', False) or ctx and ctx.get('default_res_model')
        res_model_id = defaults.get('res_model_id', False) or ctx and ctx.get('default_res_model_id')

        return res_model and res_model == 'crm.lead' or res_model_id and self.env['ir.model'].sudo().browse(res_model_id).model == 'crm.lead'
diff --git a/addons/crm/models/crm_lead.py b/addons/crm/models/crm_lead.py
new file mode 100644
index 00000000..8cd90980
--- /dev/null
+++ b/addons/crm/models/crm_lead.py
@@ -0,0 +1,2133 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+import logging
+import threading
+from datetime import date, datetime, timedelta
+from psycopg2 import sql
+
+from odoo import api, fields, models, tools, SUPERUSER_ID
+from odoo.osv import expression
+from odoo.tools.translate import _
+from odoo.tools import email_re, email_split
+from odoo.exceptions import UserError, AccessError
+from odoo.addons.phone_validation.tools import phone_validation
+from collections import OrderedDict, defaultdict
+
+from . import crm_stage
+
+_logger = logging.getLogger(__name__)
+
# Fields whose values are carried over when merging several leads into one.
CRM_LEAD_FIELDS_TO_MERGE = [
    'name',
    'partner_id',
    'campaign_id',
    'company_id',
    'country_id',
    'team_id',
    'state_id',
    'stage_id',
    'medium_id',
    'source_id',
    'user_id',
    'title',
    'city',
    'contact_name',
    'description',
    'mobile',
    'partner_name',
    'phone',
    'probability',
    'expected_revenue',
    'street',
    'street2',
    'zip',
    'create_date',
    'date_action_last',
    'email_from',
    'email_cc',
    'website']

# Subset of partner fields: sync any of those
PARTNER_FIELDS_TO_SYNC = [
    'mobile',
    'title',
    'function',
    'website',
]

# Subset of partner fields: sync all or none to avoid mixed addresses
PARTNER_ADDRESS_FIELDS_TO_SYNC = [
    'street',
    'street2',
    'city',
    'zip',
    'state_id',
    'country_id',
]

# Those values have been determined based on benchmark to minimise
# computation time, number of transaction and transaction time.
PLS_COMPUTE_BATCH_STEP = 50000  # odoo.models.PREFETCH_MAX = 1000 but larger cluster can speed up global computation
PLS_UPDATE_BATCH_STEP = 5000
+
+
class Lead(models.Model):
    """Lead / Opportunity model.

    A single model holds both leads and opportunities, distinguished by the
    ``type`` field. Contact information is kept in sync with the linked
    ``res.partner`` through stored computes with inverse methods, and the
    probability of closing is driven by the predictive lead scoring (PLS)
    engine (see ``automated_probability``).
    """
    _name = "crm.lead"
    _description = "Lead/Opportunity"
    _order = "priority desc, id desc"
    _inherit = ['mail.thread.cc',
                'mail.thread.blacklist',
                'mail.thread.phone',
                'mail.activity.mixin',
                'utm.mixin',
                'format.address.mixin',
                'phone.validation.mixin']
    _primary_email = 'email_from'

    # Description
    name = fields.Char(
        'Opportunity', index=True, required=True,
        compute='_compute_name', readonly=False, store=True)
    user_id = fields.Many2one('res.users', string='Salesperson', index=True, tracking=True, default=lambda self: self.env.user)
    user_email = fields.Char('User Email', related='user_id.email', readonly=True)
    user_login = fields.Char('User Login', related='user_id.login', readonly=True)
    company_id = fields.Many2one('res.company', string='Company', index=True, default=lambda self: self.env.company.id)
    referred = fields.Char('Referred By')
    description = fields.Text('Notes')
    active = fields.Boolean('Active', default=True, tracking=True)
    # 'lead' only when the "use leads" group is enabled, else straight to opportunity
    type = fields.Selection([
        ('lead', 'Lead'), ('opportunity', 'Opportunity')],
        index=True, required=True, tracking=15,
        default=lambda self: 'lead' if self.env['res.users'].has_group('crm.group_use_lead') else 'opportunity')
    priority = fields.Selection(
        crm_stage.AVAILABLE_PRIORITIES, string='Priority', index=True,
        default=crm_stage.AVAILABLE_PRIORITIES[0][0])
    team_id = fields.Many2one(
        'crm.team', string='Sales Team', index=True, tracking=True,
        compute='_compute_team_id', readonly=False, store=True)
    stage_id = fields.Many2one(
        'crm.stage', string='Stage', index=True, tracking=True,
        compute='_compute_stage_id', readonly=False, store=True,
        copy=False, group_expand='_read_group_stage_ids', ondelete='restrict',
        domain="['|', ('team_id', '=', False), ('team_id', '=', team_id)]")
    # Kanban dot color derived from the next activity deadline (grey/red/green)
    kanban_state = fields.Selection([
        ('grey', 'No next activity planned'),
        ('red', 'Next activity late'),
        ('green', 'Next activity is planned')], string='Kanban State',
        compute='_compute_kanban_state')
    activity_date_deadline_my = fields.Date(
        'My Activities Deadline', compute='_compute_activity_date_deadline_my',
        search='_search_activity_date_deadline_my', compute_sudo=False,
        readonly=True, store=False, groups="base.group_user")
    tag_ids = fields.Many2many(
        'crm.tag', 'crm_tag_rel', 'lead_id', 'tag_id', string='Tags',
        help="Classify and analyze your lead/opportunity categories like: Training, Service")
    color = fields.Integer('Color Index', default=0)
    # Opportunity specific
    expected_revenue = fields.Monetary('Expected Revenue', currency_field='company_currency', tracking=True)
    prorated_revenue = fields.Monetary('Prorated Revenue', currency_field='company_currency', store=True, compute="_compute_prorated_revenue")
    recurring_revenue = fields.Monetary('Recurring Revenues', currency_field='company_currency', groups="crm.group_use_recurring_revenues")
    recurring_plan = fields.Many2one('crm.recurring.plan', string="Recurring Plan", groups="crm.group_use_recurring_revenues")
    recurring_revenue_monthly = fields.Monetary('Expected MRR', currency_field='company_currency', store=True,
                                                compute="_compute_recurring_revenue_monthly",
                                                groups="crm.group_use_recurring_revenues")
    recurring_revenue_monthly_prorated = fields.Monetary('Prorated MRR', currency_field='company_currency', store=True,
                                                         compute="_compute_recurring_revenue_monthly_prorated",
                                                         groups="crm.group_use_recurring_revenues")
    company_currency = fields.Many2one("res.currency", string='Currency', related='company_id.currency_id', readonly=True)
    # Dates
    date_closed = fields.Datetime('Closed Date', readonly=True, copy=False)
    date_action_last = fields.Datetime('Last Action', readonly=True)
    date_open = fields.Datetime(
        'Assignment Date', compute='_compute_date_open', readonly=True, store=True)
    day_open = fields.Float('Days to Assign', compute='_compute_day_open', store=True)
    day_close = fields.Float('Days to Close', compute='_compute_day_close', store=True)
    date_last_stage_update = fields.Datetime(
        'Last Stage Update', compute='_compute_date_last_stage_update', index=True, readonly=True, store=True)
    date_conversion = fields.Datetime('Conversion Date', readonly=True)
    date_deadline = fields.Date('Expected Closing', help="Estimate of the date on which the opportunity will be won.")
    # Customer / contact
    partner_id = fields.Many2one(
        'res.partner', string='Customer', index=True, tracking=10,
        domain="['|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        help="Linked partner (optional). Usually created when converting the lead. You can find a partner by its Name, TIN, Email or Internal Reference.")
    partner_is_blacklisted = fields.Boolean('Partner is blacklisted', related='partner_id.is_blacklisted', readonly=True)
    contact_name = fields.Char(
        'Contact Name', tracking=30,
        compute='_compute_contact_name', readonly=False, store=True)
    partner_name = fields.Char(
        'Company Name', tracking=20, index=True,
        compute='_compute_partner_name', readonly=False, store=True,
        help='The name of the future partner company that will be created while converting the lead into opportunity')
    function = fields.Char('Job Position', compute='_compute_function', readonly=False, store=True)
    title = fields.Many2one('res.partner.title', string='Title', compute='_compute_title', readonly=False, store=True)
    email_from = fields.Char(
        'Email', tracking=40, index=True,
        compute='_compute_email_from', inverse='_inverse_email_from', readonly=False, store=True)
    phone = fields.Char(
        'Phone', tracking=50,
        compute='_compute_phone', inverse='_inverse_phone', readonly=False, store=True)
    mobile = fields.Char('Mobile', compute='_compute_mobile', readonly=False, store=True)
    # Search-only helper matching either phone or mobile, formatting ignored
    phone_mobile_search = fields.Char('Phone/Mobile', store=False, search='_search_phone_mobile_search')
    phone_state = fields.Selection([
        ('correct', 'Correct'),
        ('incorrect', 'Incorrect')], string='Phone Quality', compute="_compute_phone_state", store=True)
    email_state = fields.Selection([
        ('correct', 'Correct'),
        ('incorrect', 'Incorrect')], string='Email Quality', compute="_compute_email_state", store=True)
    website = fields.Char('Website', index=True, help="Website of the contact", compute="_compute_website", readonly=False, store=True)
    lang_id = fields.Many2one('res.lang', string='Language')
    # Address fields
    street = fields.Char('Street', compute='_compute_partner_address_values', readonly=False, store=True)
    street2 = fields.Char('Street2', compute='_compute_partner_address_values', readonly=False, store=True)
    zip = fields.Char('Zip', change_default=True, compute='_compute_partner_address_values', readonly=False, store=True)
    city = fields.Char('City', compute='_compute_partner_address_values', readonly=False, store=True)
    state_id = fields.Many2one(
        "res.country.state", string='State',
        compute='_compute_partner_address_values', readonly=False, store=True,
        domain="[('country_id', '=?', country_id)]")
    country_id = fields.Many2one(
        'res.country', string='Country',
        compute='_compute_partner_address_values', readonly=False, store=True)
    # Probability (Opportunity only)
    probability = fields.Float(
        'Probability', group_operator="avg", copy=False,
        compute='_compute_probabilities', readonly=False, store=True)
    automated_probability = fields.Float('Automated Probability', compute='_compute_probabilities', readonly=True, store=True)
    is_automated_probability = fields.Boolean('Is automated probability?', compute="_compute_is_automated_probability")
    # External records
    meeting_count = fields.Integer('# Meetings', compute='_compute_meeting_count')
    lost_reason = fields.Many2one(
        'crm.lost.reason', string='Lost Reason',
        index=True, ondelete='restrict', tracking=True)
    ribbon_message = fields.Char('Ribbon message', compute='_compute_ribbon_message')

    _sql_constraints = [
        ('check_probability', 'check(probability >= 0 and probability <= 100)', 'The probability of closing the deal should be between 0% and 100%!')
    ]
+
+ @api.depends('activity_date_deadline')
+ def _compute_kanban_state(self):
+ today = date.today()
+ for lead in self:
+ kanban_state = 'grey'
+ if lead.activity_date_deadline:
+ lead_date = fields.Date.from_string(lead.activity_date_deadline)
+ if lead_date >= today:
+ kanban_state = 'green'
+ else:
+ kanban_state = 'red'
+ lead.kanban_state = kanban_state
+
+ @api.depends('activity_ids.date_deadline')
+ @api.depends_context('uid')
+ def _compute_activity_date_deadline_my(self):
+ todo_activities = []
+ if self.ids:
+ todo_activities = self.env['mail.activity'].search([
+ ('user_id', '=', self._uid),
+ ('res_model', '=', self._name),
+ ('res_id', 'in', self.ids)
+ ], order='date_deadline ASC')
+
+ for record in self:
+ record.activity_date_deadline_my = next(
+ (activity.date_deadline for activity in todo_activities if activity.res_id == record.id),
+ False
+ )
+
+ def _search_activity_date_deadline_my(self, operator, operand):
+ return ['&', ('activity_ids.user_id', '=', self._uid), ('activity_ids.date_deadline', operator, operand)]
+
+ @api.depends('user_id', 'type')
+ def _compute_team_id(self):
+ """ When changing the user, also set a team_id or restrict team id
+ to the ones user_id is member of. """
+ for lead in self:
+ # setting user as void should not trigger a new team computation
+ if not lead.user_id:
+ continue
+ user = lead.user_id
+ if lead.team_id and user in lead.team_id.member_ids | lead.team_id.user_id:
+ continue
+ team_domain = [('use_leads', '=', True)] if lead.type == 'lead' else [('use_opportunities', '=', True)]
+ team = self.env['crm.team']._get_default_team_id(user_id=user.id, domain=team_domain)
+ lead.team_id = team.id
+
+ @api.depends('team_id', 'type')
+ def _compute_stage_id(self):
+ for lead in self:
+ if not lead.stage_id:
+ lead.stage_id = lead._stage_find(domain=[('fold', '=', False)]).id
+
    @api.depends('user_id')
    def _compute_date_open(self):
        # Assignment date: stamped when a salesperson is set, cleared when
        # the lead becomes unassigned.
        for lead in self:
            lead.date_open = fields.Datetime.now() if lead.user_id else False

    @api.depends('stage_id')
    def _compute_date_last_stage_update(self):
        # Every stage change re-stamps the field with the current time.
        for lead in self:
            lead.date_last_stage_update = fields.Datetime.now()
+
    @api.depends('create_date', 'date_open')
    def _compute_day_open(self):
        """ Compute difference, in days, between create date and open (assignment) date """
        leads = self.filtered(lambda l: l.date_open and l.create_date)
        others = self - leads
        others.day_open = None
        for lead in leads:
            # drop microseconds so sub-second noise does not flip the day count
            date_create = fields.Datetime.from_string(lead.create_date).replace(microsecond=0)
            date_open = fields.Datetime.from_string(lead.date_open)
            lead.day_open = abs((date_open - date_create).days)

    @api.depends('create_date', 'date_closed')
    def _compute_day_close(self):
        """ Compute difference, in days, between create date and closed date """
        leads = self.filtered(lambda l: l.date_closed and l.create_date)
        others = self - leads
        others.day_close = None
        for lead in leads:
            date_create = fields.Datetime.from_string(lead.create_date)
            date_close = fields.Datetime.from_string(lead.date_closed)
            lead.day_close = abs((date_close - date_create).days)
+
+ @api.depends('partner_id')
+ def _compute_name(self):
+ for lead in self:
+ if not lead.name and lead.partner_id and lead.partner_id.name:
+ lead.name = _("%s's opportunity") % lead.partner_id.name
+
    @api.depends('partner_id')
    def _compute_contact_name(self):
        """ compute the new values when partner_id has changed """
        # delegates to _prepare_contact_name_from_partner (company partners
        # have no contact name; current value is kept as fallback)
        for lead in self:
            lead.update(lead._prepare_contact_name_from_partner(lead.partner_id))

    @api.depends('partner_id')
    def _compute_partner_name(self):
        """ compute the new values when partner_id has changed """
        # delegates to _prepare_partner_name_from_partner (parent company
        # name first, then the partner itself when it is a company)
        for lead in self:
            lead.update(lead._prepare_partner_name_from_partner(lead.partner_id))
+
    # The four computes below share one rule: the partner's value wins when
    # set; otherwise the lead keeps its own value (only an empty lead field
    # is overwritten by an empty partner field).

    @api.depends('partner_id')
    def _compute_function(self):
        """ compute the new values when partner_id has changed """
        for lead in self:
            if not lead.function or lead.partner_id.function:
                lead.function = lead.partner_id.function

    @api.depends('partner_id')
    def _compute_title(self):
        """ compute the new values when partner_id has changed """
        for lead in self:
            if not lead.title or lead.partner_id.title:
                lead.title = lead.partner_id.title

    @api.depends('partner_id')
    def _compute_mobile(self):
        """ compute the new values when partner_id has changed """
        for lead in self:
            if not lead.mobile or lead.partner_id.mobile:
                lead.mobile = lead.partner_id.mobile

    @api.depends('partner_id')
    def _compute_website(self):
        """ compute the new values when partner_id has changed """
        for lead in self:
            if not lead.website or lead.partner_id.website:
                lead.website = lead.partner_id.website
+
+ @api.depends('partner_id')
+ def _compute_partner_address_values(self):
+ """ Sync all or none of address fields """
+ for lead in self:
+ lead.update(lead._prepare_address_values_from_partner(lead.partner_id))
+
    # email_from / phone are two-way synced with the partner. The compute
    # pulls the partner value onto the lead; the inverse pushes the lead
    # value back. _get_partner_email_update / _get_partner_phone_update
    # decide whether the values really differ (after normalization).

    @api.depends('partner_id.email')
    def _compute_email_from(self):
        for lead in self:
            if lead.partner_id.email and lead._get_partner_email_update():
                lead.email_from = lead.partner_id.email

    def _inverse_email_from(self):
        for lead in self:
            if lead._get_partner_email_update():
                lead.partner_id.email = lead.email_from

    @api.depends('partner_id.phone')
    def _compute_phone(self):
        for lead in self:
            if lead.partner_id.phone and lead._get_partner_phone_update():
                lead.phone = lead.partner_id.phone

    def _inverse_phone(self):
        for lead in self:
            if lead._get_partner_phone_update():
                lead.partner_id.phone = lead.phone
+
+ @api.depends('phone', 'country_id.code')
+ def _compute_phone_state(self):
+ for lead in self:
+ phone_status = False
+ if lead.phone:
+ country_code = lead.country_id.code if lead.country_id and lead.country_id.code else None
+ try:
+ if phone_validation.phone_parse(lead.phone, country_code): # otherwise library not installed
+ phone_status = 'correct'
+ except UserError:
+ phone_status = 'incorrect'
+ lead.phone_state = phone_status
+
+ @api.depends('email_from')
+ def _compute_email_state(self):
+ for lead in self:
+ email_state = False
+ if lead.email_from:
+ email_state = 'incorrect'
+ for email in email_split(lead.email_from):
+ if tools.email_normalize(email):
+ email_state = 'correct'
+ break
+ lead.email_state = email_state
+
+ @api.depends('probability', 'automated_probability')
+ def _compute_is_automated_probability(self):
+ """ If probability and automated_probability are equal probability computation
+ is considered as automatic, aka probability is sync with automated_probability """
+ for lead in self:
+ lead.is_automated_probability = tools.float_compare(lead.probability, lead.automated_probability, 2) == 0
+
+ @api.depends(lambda self: ['tag_ids', 'stage_id', 'team_id'] + self._pls_get_safe_fields())
+ def _compute_probabilities(self):
+ lead_probabilities = self._pls_get_naive_bayes_probabilities()
+ for lead in self:
+ if lead.id in lead_probabilities:
+ was_automated = lead.active and lead.is_automated_probability
+ lead.automated_probability = lead_probabilities[lead.id]
+ if was_automated:
+ lead.probability = lead.automated_probability
+
    @api.depends('expected_revenue', 'probability')
    def _compute_prorated_revenue(self):
        # expected revenue weighted by the win probability, rounded to cents
        for lead in self:
            lead.prorated_revenue = round((lead.expected_revenue or 0.0) * (lead.probability or 0) / 100.0, 2)

    @api.depends('recurring_revenue', 'recurring_plan.number_of_months')
    def _compute_recurring_revenue_monthly(self):
        # recurring revenue spread over the plan duration (1 month default)
        for lead in self:
            lead.recurring_revenue_monthly = (lead.recurring_revenue or 0.0) / (lead.recurring_plan.number_of_months or 1)

    @api.depends('recurring_revenue_monthly', 'probability')
    def _compute_recurring_revenue_monthly_prorated(self):
        # MRR weighted by the win probability
        for lead in self:
            lead.recurring_revenue_monthly_prorated = (lead.recurring_revenue_monthly or 0.0) * (lead.probability or 0) / 100.0
+
+ def _compute_meeting_count(self):
+ if self.ids:
+ meeting_data = self.env['calendar.event'].sudo().read_group([
+ ('opportunity_id', 'in', self.ids)
+ ], ['opportunity_id'], ['opportunity_id'])
+ mapped_data = {m['opportunity_id'][0]: m['opportunity_id_count'] for m in meeting_data}
+ else:
+ mapped_data = dict()
+ for lead in self:
+ lead.meeting_count = mapped_data.get(lead.id, 0)
+
    @api.depends('email_from', 'phone', 'partner_id')
    def _compute_ribbon_message(self):
        # Warn the user when saving the lead will also rewrite the linked
        # partner's email and/or phone (see the inverse methods above).
        for lead in self:
            will_write_email = lead._get_partner_email_update()
            will_write_phone = lead._get_partner_phone_update()

            if will_write_email and will_write_phone:
                lead.ribbon_message = _('By saving this change, the customer email and phone number will also be updated.')
            elif will_write_email:
                lead.ribbon_message = _('By saving this change, the customer email will also be updated.')
            elif will_write_phone:
                lead.ribbon_message = _('By saving this change, the customer phone number will also be updated.')
            else:
                lead.ribbon_message = False
+
+ def _search_phone_mobile_search(self, operator, value):
+ if len(value) <= 2:
+ raise UserError(_('Please enter at least 3 digits when searching on phone / mobile.'))
+
+ query = f"""
+ SELECT model.id
+ FROM {self._table} model
+ WHERE REGEXP_REPLACE(model.phone, '[^\d+]+', '', 'g') SIMILAR TO CONCAT(%s, REGEXP_REPLACE(%s, '\D+', '', 'g'), '%%')
+ OR REGEXP_REPLACE(model.mobile, '[^\d+]+', '', 'g') SIMILAR TO CONCAT(%s, REGEXP_REPLACE(%s, '\D+', '', 'g'), '%%')
+ """
+
+ # searching on +32485112233 should also finds 00485112233 (00 / + prefix are both valid)
+ # we therefore remove it from input value and search for both of them in db
+ if value.startswith('+') or value.startswith('00'):
+ if value.startswith('00'):
+ value = value[2:]
+ starts_with = '00|\+'
+ else:
+ starts_with = '%'
+
+ self._cr.execute(query, (starts_with, value, starts_with, value))
+ res = self._cr.fetchall()
+ if not res:
+ return [(0, '=', 1)]
+ return [('id', 'in', [r[0] for r in res])]
+
    @api.onchange('phone', 'country_id', 'company_id')
    def _onchange_phone_validation(self):
        # reformat on the fly; phone_format comes from phone.validation.mixin
        if self.phone:
            self.phone = self.phone_format(self.phone)

    @api.onchange('mobile', 'country_id', 'company_id')
    def _onchange_mobile_validation(self):
        # same country-aware reformatting for the mobile number
        if self.mobile:
            self.mobile = self.phone_format(self.mobile)
+
+ def _prepare_values_from_partner(self, partner):
+ """ Get a dictionary with values coming from partner information to
+ copy on a lead. Non-address fields get the current lead
+ values to avoid being reset if partner has no value for them. """
+
+ # Sync all address fields from partner, or none, to avoid mixing them.
+ values = self._prepare_address_values_from_partner(partner)
+
+ # For other fields, get the info from the partner, but only if set
+ values.update({f: partner[f] or self[f] for f in PARTNER_FIELDS_TO_SYNC})
+
+ # Fields with specific logic
+ values.update(self._prepare_contact_name_from_partner(partner))
+ values.update(self._prepare_partner_name_from_partner(partner))
+
+ return self._convert_to_write(values)
+
+ def _prepare_address_values_from_partner(self, partner):
+ # Sync all address fields from partner, or none, to avoid mixing them.
+ if any(partner[f] for f in PARTNER_ADDRESS_FIELDS_TO_SYNC):
+ values = {f: partner[f] for f in PARTNER_ADDRESS_FIELDS_TO_SYNC}
+ else:
+ values = {f: self[f] for f in PARTNER_ADDRESS_FIELDS_TO_SYNC}
+ return values
+
+ def _prepare_contact_name_from_partner(self, partner):
+ contact_name = False if partner.is_company else partner.name
+ return {'contact_name': contact_name or self.contact_name}
+
+ def _prepare_partner_name_from_partner(self, partner):
+ partner_name = partner.parent_id.name
+ if not partner_name and partner.is_company:
+ partner_name = partner.name
+ return {'partner_name': partner_name or self.partner_name}
+
+ def _get_partner_email_update(self):
+ """Calculate if we should write the email on the related partner. When
+ the email of the lead / partner is an empty string, we force it to False
+ to not propagate a False on an empty string.
+
+ Done in a separate method so it can be used in both ribbon and inverse
+ and compute of email update methods.
+ """
+ self.ensure_one()
+ if self.partner_id and self.email_from != self.partner_id.email:
+ lead_email_normalized = tools.email_normalize(self.email_from) or self.email_from or False
+ partner_email_normalized = tools.email_normalize(self.partner_id.email) or self.partner_id.email or False
+ return lead_email_normalized != partner_email_normalized
+ return False
+
+ def _get_partner_phone_update(self):
+ """Calculate if we should write the phone on the related partner. When
+ the phone of the lead / partner is an empty string, we force it to False
+ to not propagate a False on an empty string.
+
+ Done in a separate method so it can be used in both ribbon and inverse
+ and compute of phone update methods.
+ """
+ self.ensure_one()
+ if self.partner_id and self.phone != self.partner_id.phone:
+ lead_phone_formatted = self.phone_format(self.phone) if self.phone else False or self.phone or False
+ partner_phone_formatted = self.phone_format(self.partner_id.phone) if self.partner_id.phone else False or self.partner_id.phone or False
+ return lead_phone_formatted != partner_phone_formatted
+ return False
+
+ # ------------------------------------------------------------
+ # ORM
+ # ------------------------------------------------------------
+
    def _auto_init(self):
        """Add composite indexes matching the most frequent lead list
        filters (by salesperson/team/type and by creation date/team)."""
        res = super(Lead, self)._auto_init()
        tools.create_index(self._cr, 'crm_lead_user_id_team_id_type_index',
                           self._table, ['user_id', 'team_id', 'type'])
        tools.create_index(self._cr, 'crm_lead_create_date_team_id_idx',
                           self._table, ['create_date', 'team_id'])
        return res
+
+ @api.model_create_multi
+ def create(self, vals_list):
+ for vals in vals_list:
+ if vals.get('website'):
+ vals['website'] = self.env['res.partner']._clean_website(vals['website'])
+ leads = super(Lead, self).create(vals_list)
+
+ for lead, values in zip(leads, vals_list):
+ if any(field in ['active', 'stage_id'] for field in values):
+ lead._handle_won_lost(values)
+
+ return leads
+
    def write(self, vals):
        """Normalize website, maintain probability / date_closed on stage or
        probability changes, and update the scoring frequencies on won/lost
        transitions (which must run BEFORE super() so the old state is
        still readable on the records)."""
        if vals.get('website'):
            vals['website'] = self.env['res.partner']._clean_website(vals['website'])

        # stage change: update date_last_stage_update
        if 'stage_id' in vals:
            stage_id = self.env['crm.stage'].browse(vals['stage_id'])
            if stage_id.is_won:
                # moving to a won stage forces the probability to 100%
                vals.update({'probability': 100, 'automated_probability': 100})

        # stage change with new stage: update probability and date_closed
        if vals.get('probability', 0) >= 100 or not vals.get('active', True):
            vals['date_closed'] = fields.Datetime.now()
        elif 'probability' in vals:
            # probability dropped below 100: the lead is open again
            vals['date_closed'] = False

        if any(field in ['active', 'stage_id'] for field in vals):
            # scoring frequency bookkeeping, before the old values are lost
            self._handle_won_lost(vals)

        write_result = super(Lead, self).write(vals)

        return write_result
+
    @api.model
    def search(self, args, offset=0, limit=None, order=None, count=False):
        """ Override to support ordering on activity_date_deadline_my.

        Ordering through web client calls search_read with an order parameter set.
        Search_read then calls search. In this override we therefore override search
        to intercept a search without count with an order on activity_date_deadline_my.
        In that case we do the search in two steps.

        First step: fill with deadline-based results

          * Perform a read_group on my activities to get a mapping lead_id / deadline
            Remember date_deadline is required, we always have a value for it. Only
            the earliest deadline per lead is kept.
          * Search leads linked to those activities that also match the asked domain
            and order from the original search request.
          * Results of that search will be at the top of returned results. Use limit
            None because we have to search all leads linked to activities as ordering
            on deadline is done in post processing.
          * Reorder them according to deadline asc or desc depending on original
            search ordering. Finally take only a subset of those leads to fill with
            results matching asked offset / limit.

        Second step: fill with other results. If first step does not gives results
        enough to match offset and limit parameters we fill with a search on other
        leads. We keep the asked domain and ordering while filtering out already
        scanned leads to keep a coherent results.

        All other search and search_read are left untouched by this override to avoid
        side effects. Search_count is not affected by this override.
        """
        # fast path: not ordering on the virtual field -> standard search
        if count or not order or 'activity_date_deadline_my' not in order:
            return super(Lead, self).search(args, offset=offset, limit=limit, order=order, count=count)
        order_items = [order_item.strip().lower() for order_item in (order or self._order).split(',')]

        # Perform a read_group on my activities to get a mapping lead_id / deadline
        # Remember date_deadline is required, we always have a value for it. Only
        # the earliest deadline per lead is kept.
        activity_asc = any('activity_date_deadline_my asc' in item for item in order_items)
        my_lead_activities = self.env['mail.activity'].read_group(
            [('res_model', '=', self._name), ('user_id', '=', self.env.uid)],
            ['res_id', 'date_deadline:min'],
            ['res_id'],
            orderby='date_deadline ASC'
        )
        my_lead_mapping = dict((item['res_id'], item['date_deadline']) for item in my_lead_activities)
        my_lead_ids = list(my_lead_mapping.keys())
        my_lead_domain = expression.AND([[('id', 'in', my_lead_ids)], args])
        # secondary ordering criteria, with the virtual field stripped out
        my_lead_order = ', '.join(item for item in order_items if 'activity_date_deadline_my' not in item)

        # Search leads linked to those activities and order them. See docstring
        # of this method for more details.
        search_res = super(Lead, self).search(my_lead_domain, offset=0, limit=None, order=my_lead_order, count=count)
        my_lead_ids_ordered = sorted(search_res.ids, key=lambda lead_id: my_lead_mapping[lead_id], reverse=not activity_asc)
        # keep only requested window (offset + limit, or offset+)
        my_lead_ids_keep = my_lead_ids_ordered[offset:(offset + limit)] if limit else my_lead_ids_ordered[offset:]
        # keep list of already skipped lead ids to exclude them from future search
        my_lead_ids_skip = my_lead_ids_ordered[:(offset + limit)] if limit else my_lead_ids_ordered

        # do not go further if limit is achieved
        if limit and len(my_lead_ids_keep) >= limit:
            return self.browse(my_lead_ids_keep)

        # Fill with remaining leads. If a limit is given, simply remove count of
        # already fetched. Otherwise keep none. If an offset is set we have to
        # reduce it by already fetch results hereabove. Order is updated to exclude
        # activity_date_deadline_my when calling super() .
        lead_limit = (limit - len(my_lead_ids_keep)) if limit else None
        if offset:
            lead_offset = max((offset - len(search_res), 0))
        else:
            lead_offset = 0
        lead_order = ', '.join(item for item in order_items if 'activity_date_deadline_my' not in item)

        other_lead_res = super(Lead, self).search(
            expression.AND([[('id', 'not in', my_lead_ids_skip)], args]),
            offset=lead_offset, limit=lead_limit, order=lead_order, count=count
        )
        return self.browse(my_lead_ids_keep) + other_lead_res
+
+ def _handle_won_lost(self, vals):
+ """ This method handle the state changes :
+ - To lost : We need to increment corresponding lost count in scoring frequency table
+ - To won : We need to increment corresponding won count in scoring frequency table
+ - From lost to Won : We need to decrement corresponding lost count + increment corresponding won count
+ in scoring frequency table.
+ - From won to lost : We need to decrement corresponding won count + increment corresponding lost count
+ in scoring frequency table."""
+ Lead = self.env['crm.lead']
+ leads_reach_won = Lead
+ leads_leave_won = Lead
+ leads_reach_lost = Lead
+ leads_leave_lost = Lead
+ won_stage_ids = self.env['crm.stage'].search([('is_won', '=', True)]).ids
+ for lead in self:
+ if 'stage_id' in vals:
+ if vals['stage_id'] in won_stage_ids:
+ if lead.probability == 0:
+ leads_leave_lost |= lead
+ leads_reach_won |= lead
+ elif lead.stage_id.id in won_stage_ids and lead.active: # a lead can be lost at won_stage
+ leads_leave_won |= lead
+ if 'active' in vals:
+ if not vals['active'] and lead.active: # archive lead
+ if lead.stage_id.id in won_stage_ids and lead not in leads_leave_won:
+ leads_leave_won |= lead
+ leads_reach_lost |= lead
+ elif vals['active'] and not lead.active: # restore lead
+ leads_leave_lost |= lead
+
+ leads_reach_won._pls_increment_frequencies(to_state='won')
+ leads_leave_won._pls_increment_frequencies(from_state='won')
+ leads_reach_lost._pls_increment_frequencies(to_state='lost')
+ leads_leave_lost._pls_increment_frequencies(from_state='lost')
+
+ @api.returns('self', lambda value: value.id)
+ def copy(self, default=None):
+ self.ensure_one()
+ # set default value in context, if not already set (Put stage to 'new' stage)
+ context = dict(self._context)
+ context.setdefault('default_type', self.type)
+ context.setdefault('default_team_id', self.team_id.id)
+ # Set date_open to today if it is an opp
+ default = default or {}
+ default['date_open'] = fields.Datetime.now() if self.type == 'opportunity' else False
+ # Do not assign to an archived user
+ if not self.user_id.active:
+ default['user_id'] = False
+ if not self.env.user.has_group('crm.group_use_recurring_revenues'):
+ default['recurring_revenue'] = 0
+ default['recurring_plan'] = False
+ return super(Lead, self.with_context(context)).copy(default=default)
+
+ @api.model
+ def _fields_view_get(self, view_id=None, view_type='form', toolbar=False, submenu=False):
+ if self._context.get('opportunity_id'):
+ opportunity = self.browse(self._context['opportunity_id'])
+ action = opportunity.get_formview_action()
+ if action.get('views') and any(view_id for view_id in action['views'] if view_id[1] == view_type):
+ view_id = next(view_id[0] for view_id in action['views'] if view_id[1] == view_type)
+ res = super(Lead, self)._fields_view_get(view_id=view_id, view_type=view_type, toolbar=toolbar, submenu=submenu)
+ if view_type == 'form':
+ res['arch'] = self._fields_view_get_address(res['arch'])
+ return res
+
+ @api.model
+ def _read_group_stage_ids(self, stages, domain, order):
+ # retrieve team_id from the context and write the domain
+ # - ('id', 'in', stages.ids): add columns that should be present
+ # - OR ('fold', '=', False): add default columns that are not folded
+ # - OR ('team_ids', '=', team_id), ('fold', '=', False) if team_id: add team columns that are not folded
+ team_id = self._context.get('default_team_id')
+ if team_id:
+ search_domain = ['|', ('id', 'in', stages.ids), '|', ('team_id', '=', False), ('team_id', '=', team_id)]
+ else:
+ search_domain = ['|', ('id', 'in', stages.ids), ('team_id', '=', False)]
+
+ # perform search
+ stage_ids = stages._search(search_domain, order=order, access_rights_uid=SUPERUSER_ID)
+ return stages.browse(stage_ids)
+
+ def _stage_find(self, team_id=False, domain=None, order='sequence'):
+ """ Determine the stage of the current lead with its teams, the given domain and the given team_id
+ :param team_id
+ :param domain : base search domain for stage
+ :returns crm.stage recordset
+ """
+ # collect all team_ids by adding given one, and the ones related to the current leads
+ team_ids = set()
+ if team_id:
+ team_ids.add(team_id)
+ for lead in self:
+ if lead.team_id:
+ team_ids.add(lead.team_id.id)
+ # generate the domain
+ if team_ids:
+ search_domain = ['|', ('team_id', '=', False), ('team_id', 'in', list(team_ids))]
+ else:
+ search_domain = [('team_id', '=', False)]
+ # AND with the domain in parameter
+ if domain:
+ search_domain += list(domain)
+ # perform search, return the first found
+ return self.env['crm.stage'].search(search_domain, order=order, limit=1)
+
+ # ------------------------------------------------------------
+ # ACTIONS
+ # ------------------------------------------------------------
+
+ def toggle_active(self):
+ """ When archiving: mark probability as 0. When re-activating
+ update probability again, for leads and opportunities. """
+ res = super(Lead, self).toggle_active()
+ activated = self.filtered(lambda lead: lead.active)
+ archived = self.filtered(lambda lead: not lead.active)
+ if activated:
+ activated.write({'lost_reason': False})
+ activated._compute_probabilities()
+ if archived:
+ archived.write({'probability': 0, 'automated_probability': 0})
+ return res
+
+ def action_set_lost(self, **additional_values):
+ """ Lost semantic: probability = 0 or active = False """
+ res = self.action_archive()
+ if additional_values:
+ self.write(dict(additional_values))
+ return res
+
+ def action_set_won(self):
+ """ Won semantic: probability = 100 (active untouched) """
+ self.action_unarchive()
+ # group the leads by team_id, in order to write once by values couple (each write leads to frequency increment)
+ leads_by_won_stage = {}
+ for lead in self:
+ stage_id = lead._stage_find(domain=[('is_won', '=', True)])
+ if stage_id in leads_by_won_stage:
+ leads_by_won_stage[stage_id] |= lead
+ else:
+ leads_by_won_stage[stage_id] = lead
+ for won_stage_id, leads in leads_by_won_stage.items():
+ leads.write({'stage_id': won_stage_id.id, 'probability': 100})
+ return True
+
+ def action_set_automated_probability(self):
+ self.write({'probability': self.automated_probability})
+
    def action_set_won_rainbowman(self):
        """ Mark the (single) lead as won and, when a congratulation message
        applies, return a client-side 'rainbow_man' effect dictionary.

        :return: effect dict for the web client, or True when no message applies
        """
        self.ensure_one()
        self.action_set_won()

        message = self._get_rainbowman_message()
        if message:
            return {
                'effect': {
                    'fadeout': 'slow',
                    'message': message,
                    # team leader's avatar when one is set, generic smiley otherwise
                    'img_url': '/web/image/%s/%s/image_1024' % (self.team_id.user_id._name, self.team_id.user_id.id) if self.team_id.user_id.image_1024 else '/web/static/src/img/smile.svg',
                    'type': 'rainbow_man',
                }
            }
        return True
+
+ def get_rainbowman_message(self):
+ self.ensure_one()
+ if self.stage_id.is_won:
+ return self._get_rainbowman_message()
+ return False
+
    def _get_rainbowman_message(self):
        """ Pick the congratulation message for a freshly won deal.

        Compares the deal's expected revenue against the salesperson's and the
        team's bests over the last 7 / 30 days (won opportunities of the
        current year only). Returns False when the lead lacks a salesperson,
        a team or an expected revenue, or when nothing noteworthy matches.
        """
        message = False
        if self.user_id and self.team_id and self.expected_revenue:
            self.flush()  # flush fields to make sure DB is up to date
            # One SQL pass over this year's won opportunities of the same user
            # or team; parameters are passed separately (no string formatting).
            query = """
                SELECT
                    SUM(CASE WHEN user_id = %(user_id)s THEN 1 ELSE 0 END) as total_won,
                    MAX(CASE WHEN date_closed >= CURRENT_DATE - INTERVAL '30 days' AND user_id = %(user_id)s THEN expected_revenue ELSE 0 END) as max_user_30,
                    MAX(CASE WHEN date_closed >= CURRENT_DATE - INTERVAL '7 days' AND user_id = %(user_id)s THEN expected_revenue ELSE 0 END) as max_user_7,
                    MAX(CASE WHEN date_closed >= CURRENT_DATE - INTERVAL '30 days' AND team_id = %(team_id)s THEN expected_revenue ELSE 0 END) as max_team_30,
                    MAX(CASE WHEN date_closed >= CURRENT_DATE - INTERVAL '7 days' AND team_id = %(team_id)s THEN expected_revenue ELSE 0 END) as max_team_7
                FROM crm_lead
                WHERE
                    type = 'opportunity'
                AND
                    active = True
                AND
                    probability = 100
                AND
                    DATE_TRUNC('year', date_closed) = DATE_TRUNC('year', CURRENT_DATE)
                AND
                    (user_id = %(user_id)s OR team_id = %(team_id)s)
            """
            self.env.cr.execute(query, {'user_id': self.user_id.id,
                                        'team_id': self.team_id.id})
            query_result = self.env.cr.dictfetchone()

            # branch order matters: first deal ever beats team records,
            # which beat personal records
            if query_result['total_won'] == 1:
                message = _('Go, go, go! Congrats for your first deal.')
            elif query_result['max_team_30'] == self.expected_revenue:
                message = _('Boom! Team record for the past 30 days.')
            elif query_result['max_team_7'] == self.expected_revenue:
                message = _('Yeah! Deal of the last 7 days for the team.')
            elif query_result['max_user_30'] == self.expected_revenue:
                message = _('You just beat your personal record for the past 30 days.')
            elif query_result['max_user_7'] == self.expected_revenue:
                message = _('You just beat your personal record for the past 7 days.')
        return message
+
+ def action_schedule_meeting(self):
+ """ Open meeting's calendar view to schedule meeting on current opportunity.
+ :return dict: dictionary value for created Meeting view
+ """
+ self.ensure_one()
+ action = self.env["ir.actions.actions"]._for_xml_id("calendar.action_calendar_event")
+ partner_ids = self.env.user.partner_id.ids
+ if self.partner_id:
+ partner_ids.append(self.partner_id.id)
+ action['context'] = {
+ 'default_opportunity_id': self.id if self.type == 'opportunity' else False,
+ 'default_partner_id': self.partner_id.id,
+ 'default_partner_ids': partner_ids,
+ 'default_attendee_ids': [(0, 0, {'partner_id': pid}) for pid in partner_ids],
+ 'default_team_id': self.team_id.id,
+ 'default_name': self.name,
+ }
+ return action
+
+ def action_snooze(self):
+ self.ensure_one()
+ today = date.today()
+ my_next_activity = self.activity_ids.filtered(lambda activity: activity.user_id == self.env.user)[:1]
+ if my_next_activity:
+ if my_next_activity.date_deadline < today:
+ date_deadline = today + timedelta(days=7)
+ else:
+ date_deadline = my_next_activity.date_deadline + timedelta(days=7)
+ my_next_activity.write({
+ 'date_deadline': date_deadline
+ })
+ return True
+
+ # ------------------------------------------------------------
+ # BUSINESS
+ # ------------------------------------------------------------
+
+ def log_meeting(self, meeting_subject, meeting_date, duration):
+ if not duration:
+ duration = _('unknown')
+ else:
+ duration = str(duration)
+ meet_date = fields.Datetime.from_string(meeting_date)
+ meeting_usertime = fields.Datetime.to_string(fields.Datetime.context_timestamp(self, meet_date))
+ html_time = "<time datetime='%s+00:00'>%s</time>" % (meeting_date, meeting_usertime)
+ message = _("Meeting scheduled at '%s'<br> Subject: %s <br> Duration: %s hours") % (html_time, meeting_subject, duration)
+ return self.message_post(body=message)
+
+ # ------------------------------------------------------------
+ # MERGE LEADS / OPPS
+ # ------------------------------------------------------------
+
+ def _merge_get_result_type(self):
+ """ Define the type of the result of the merge. If at least one of the
+ element to merge is an opp, the resulting new element will be an opp.
+ Otherwise it will be a lead. """
+ if any(record.type == 'opportunity' for record in self):
+ return 'opportunity'
+ return 'lead'
+
+ def _merge_data(self, fields):
+ """ Prepare lead/opp data into a dictionary for merging. Different types
+ of fields are processed in different ways:
+ - text: all the values are concatenated
+ - m2m and o2m: those fields aren't processed
+ - m2o: the first not null value prevails (the other are dropped)
+ - any other type of field: same as m2o
+
+ :param fields: list of fields to process
+ :return dict data: contains the merged values of the new opportunity
+ """
+ # helpers
+ def _get_first_not_null(attr, opportunities):
+ for opp in opportunities:
+ val = opp[attr]
+ if val:
+ return val
+ return False
+
+ def _get_first_not_null_id(attr, opportunities):
+ res = _get_first_not_null(attr, opportunities)
+ return res.id if res else False
+
+ # process the fields' values
+ data = {}
+ for field_name in fields:
+ field = self._fields.get(field_name)
+ if field is None:
+ continue
+ if field.type in ('many2many', 'one2many'):
+ continue
+ elif field.type == 'many2one':
+ data[field_name] = _get_first_not_null_id(field_name, self) # take the first not null
+ elif field.type == 'text':
+ data[field_name] = '\n\n'.join(it for it in self.mapped(field_name) if it)
+ else:
+ data[field_name] = _get_first_not_null(field_name, self)
+
+ # define the resulting type ('lead' or 'opportunity')
+ data['type'] = self._merge_get_result_type()
+ return data
+
+ def _merge_notify_get_merged_fields_message(self, fields):
+ """ Generate the message body with the changed values
+
+ :param fields : list of fields to track
+ :returns a list of message bodies for the corresponding leads
+ """
+ bodies = []
+ for lead in self:
+ title = "%s : %s\n" % (_('Merged opportunity') if lead.type == 'opportunity' else _('Merged lead'), lead.name)
+ body = [title]
+ _fields = self.env['ir.model.fields'].search([
+ ('name', 'in', fields or []),
+ ('model_id.model', '=', lead._name),
+ ])
+ for field in _fields:
+ value = getattr(lead, field.name, False)
+ if field.ttype == 'selection':
+ selections = lead.fields_get()[field.name]['selection']
+ value = next((v[1] for v in selections if v[0] == value), value)
+ elif field.ttype == 'many2one':
+ if value:
+ value = value.sudo().display_name
+ elif field.ttype == 'many2many':
+ if value:
+ value = ','.join(
+ val.display_name
+ for val in value.sudo()
+ )
+ body.append("%s: %s" % (field.field_description, value or ''))
+ bodies.append("<br/>".join(body + ['<br/>']))
+ return bodies
+
+ def _merge_notify(self, opportunities):
+ """ Post a message gathering merged leads/opps informations. It explains
+ which fields has been merged and their new value. `self` is the resulting
+ merge crm.lead record.
+
+ :param opportunities: see ``merge_dependences``
+ """
+ # TODO JEM: mail template should be used instead of fix body, subject text
+ self.ensure_one()
+ # mail message's subject
+ result_type = opportunities._merge_get_result_type()
+ merge_message = _('Merged leads') if result_type == 'lead' else _('Merged opportunities')
+ subject = merge_message + ": " + ", ".join(opportunities.mapped('name'))
+ # message bodies
+ message_bodies = opportunities._merge_notify_get_merged_fields_message(list(CRM_LEAD_FIELDS_TO_MERGE))
+ message_body = "\n\n".join(message_bodies)
+ return self.message_post(body=message_body, subject=subject)
+
+ def _merge_opportunity_history(self, opportunities):
+ """ Move mail.message from the given opportunities to the current one. `self` is the
+ crm.lead record destination for message of `opportunities`.
+
+ :param opportunities: see ``merge_dependences``
+ """
+ self.ensure_one()
+ for opportunity in opportunities:
+ for message in opportunity.message_ids:
+ if message.subject:
+ subject = _("From %(source_name)s : %(source_subject)s", source_name=opportunity.name, source_subject=message.subject)
+ else:
+ subject = _("From %(source_name)s", source_name=opportunity.name)
+ message.write({
+ 'res_id': self.id,
+ 'subject': subject,
+ })
+ return True
+
+ def _merge_opportunity_attachments(self, opportunities):
+ """ Move attachments of given opportunities to the current one `self`, and rename
+ the attachments having same name than native ones.
+
+ :param opportunities: see ``merge_dependences``
+ """
+ self.ensure_one()
+
+ # return attachments of opportunity
+ def _get_attachments(opportunity_id):
+ return self.env['ir.attachment'].search([('res_model', '=', self._name), ('res_id', '=', opportunity_id)])
+
+ first_attachments = _get_attachments(self.id)
+ # counter of all attachments to move. Used to make sure the name is different for all attachments
+ count = 1
+ for opportunity in opportunities:
+ attachments = _get_attachments(opportunity.id)
+ for attachment in attachments:
+ values = {'res_id': self.id}
+ for attachment_in_first in first_attachments:
+ if attachment.name == attachment_in_first.name:
+ values['name'] = "%s (%s)" % (attachment.name, count)
+ count += 1
+ attachment.write(values)
+ return True
+
+ def merge_dependences(self, opportunities):
+ """ Merge dependences (messages, attachments, ...). These dependences will be
+ transfered to `self`, the most important lead.
+
+ :param opportunities : recordset of opportunities to transfer. Does not
+ include `self` which is the target crm.lead being the result of the merge.
+ """
+ self.ensure_one()
+ self._merge_notify(opportunities)
+ self._merge_opportunity_history(opportunities)
+ self._merge_opportunity_attachments(opportunities)
+
    def merge_opportunity(self, user_id=False, team_id=False, auto_unlink=True):
        """ Merge opportunities in one. Different cases of merge:
        - merge leads together = 1 new lead
        - merge at least 1 opp with anything else (lead or opp) = 1 new opp
        The resulting lead/opportunity will be the most important one (based on its confidence level)
        updated with values from other opportunities to merge.

        :param user_id: id of the salesperson to force on the result. If not given, determined by `_merge_data`.
        :param team_id: id of the Sales Team to force on the result. If not given, determined by `_merge_data`.
        :param auto_unlink: if True, the merged tail records are deleted once their data is transferred.
        :return: crm.lead record resulting of the merge
        :raise UserError: if fewer than two records are selected, or more than
            five for a non-superuser (guard against large accidental merges).
        """
        if len(self.ids) <= 1:
            raise UserError(_('Please select more than one element (lead or opportunity) from the list view.'))

        if len(self.ids) > 5 and not self.env.is_superuser():
            raise UserError(_("To prevent data loss, Leads and Opportunities can only be merged by groups of 5."))

        # most reliable record first: it becomes the merge target
        opportunities = self._sort_by_confidence_level(reverse=True)

        # get SORTED recordset of head and tail, and complete list
        opportunities_head = opportunities[0]
        opportunities_tail = opportunities[1:]

        # merge all the sorted opportunity. This means the value of
        # the first (head opp) will be a priority.
        merged_data = opportunities._merge_data(list(CRM_LEAD_FIELDS_TO_MERGE))

        # force value for saleperson and Sales Team
        if user_id:
            merged_data['user_id'] = user_id
        if team_id:
            merged_data['team_id'] = team_id

        # merge other data (mail.message, attachments, ...) from tail into head
        opportunities_head.merge_dependences(opportunities_tail)

        # check if the stage is in the stages of the Sales Team. If not, assign the stage with the lowest sequence
        if merged_data.get('team_id'):
            team_stage_ids = self.env['crm.stage'].search(['|', ('team_id', '=', merged_data['team_id']), ('team_id', '=', False)], order='sequence')
            if merged_data.get('stage_id') not in team_stage_ids.ids:
                merged_data['stage_id'] = team_stage_ids[0].id if team_stage_ids else False

        # write merged data into first opportunity
        opportunities_head.write(merged_data)

        # delete tail opportunities
        # we use the SUPERUSER to avoid access rights issues because as the user had the rights to see the records it should be safe to do so
        if auto_unlink:
            opportunities_tail.sudo().unlink()

        return opportunities_head
+
+ def _sort_by_confidence_level(self, reverse=False):
+ """ Sorting the leads/opps according to the confidence level of its stage, which relates to the probability of winning it
+ The confidence level increases with the stage sequence
+ An Opportunity always has higher confidence level than a lead
+ """
+ def opps_key(opportunity):
+ return opportunity.type == 'opportunity', opportunity.stage_id.sequence, -opportunity._origin.id
+
+ return self.sorted(key=opps_key, reverse=reverse)
+
+ def _convert_opportunity_data(self, customer, team_id=False):
+ """ Extract the data from a lead to create the opportunity
+ :param customer : res.partner record
+ :param team_id : identifier of the Sales Team to determine the stage
+ """
+ new_team_id = team_id if team_id else self.team_id.id
+ upd_values = {
+ 'type': 'opportunity',
+ 'date_open': fields.Datetime.now(),
+ 'date_conversion': fields.Datetime.now(),
+ }
+ if customer != self.partner_id:
+ upd_values['partner_id'] = customer.id if customer else False
+ if not self.stage_id:
+ stage = self._stage_find(team_id=new_team_id)
+ upd_values['stage_id'] = stage.id
+ return upd_values
+
+ def convert_opportunity(self, partner_id, user_ids=False, team_id=False):
+ customer = False
+ if partner_id:
+ customer = self.env['res.partner'].browse(partner_id)
+ for lead in self:
+ if not lead.active or lead.probability == 100:
+ continue
+ vals = lead._convert_opportunity_data(customer, team_id)
+ lead.write(vals)
+
+ if user_ids or team_id:
+ self.handle_salesmen_assignment(user_ids, team_id)
+
+ return True
+
    def _get_lead_duplicates(self, partner=None, email=None, include_lost=False):
        """ Search for leads that seem duplicated based on partner / email.

        :param partner : optional customer when searching duplicated
        :param email: email (possibly formatted) to search
        :param boolean include_lost: if True, search includes archived opportunities
          (still only active leads are considered). If False, search for active
          and not won leads and opportunities;
        :return: crm.lead recordset of potential duplicates (searched with
          active_test disabled so the domain alone decides on archived records)
        """
        if not email and not partner:
            return self.env['crm.lead']

        domain = []
        # NOTE(review): tools.email_normalize can return False on an invalid
        # address, which would add ('email_normalized', '=', False) and match
        # every email-less lead -- confirm callers pass valid addresses
        for normalized_email in [tools.email_normalize(email) for email in tools.email_split(email)]:
            domain.append(('email_normalized', '=', normalized_email))
        if partner:
            domain.append(('partner_id', '=', partner.id))

        if not domain:
            return self.env['crm.lead']

        # OR together all criteria built above (prefix notation)
        domain = ['|'] * (len(domain) - 1) + domain
        if include_lost:
            domain += ['|', ('type', '=', 'opportunity'), ('active', '=', True)]
        else:
            domain += ['&', ('active', '=', True), '|', ('probability', '=', False), ('probability', '<', 100)]

        return self.with_context(active_test=False).search(domain)
+
+ def _create_customer(self):
+ """ Create a partner from lead data and link it to the lead.
+
+ :return: newly-created partner browse record
+ """
+ Partner = self.env['res.partner']
+ contact_name = self.contact_name
+ if not contact_name:
+ contact_name = Partner._parse_partner_name(self.email_from)[0] if self.email_from else False
+
+ if self.partner_name:
+ partner_company = Partner.create(self._prepare_customer_values(self.partner_name, is_company=True))
+ elif self.partner_id:
+ partner_company = self.partner_id
+ else:
+ partner_company = None
+
+ if contact_name:
+ return Partner.create(self._prepare_customer_values(contact_name, is_company=False, parent_id=partner_company.id if partner_company else False))
+
+ if partner_company:
+ return partner_company
+ return Partner.create(self._prepare_customer_values(self.name, is_company=False))
+
    def _prepare_customer_values(self, partner_name, is_company=False, parent_id=False):
        """ Extract data from lead to create a partner.

        :param partner_name: future name of the partner
        :param is_company: True if the partner is a company
        :param parent_id: id of the parent partner (False if no parent)

        :return: dictionary of values to give at res_partner.create()
        """
        # only the first address of a formatted email list is kept
        email_split = tools.email_split(self.email_from)
        res = {
            'name': partner_name,
            'user_id': self.env.context.get('default_user_id') or self.user_id.id,
            'comment': self.description,
            'team_id': self.team_id.id,
            'parent_id': parent_id,
            'phone': self.phone,
            'mobile': self.mobile,
            'email': email_split[0] if email_split else False,
            'title': self.title.id,
            'function': self.function,
            'street': self.street,
            'street2': self.street2,
            'zip': self.zip,
            'city': self.city,
            'country_id': self.country_id.id,
            'state_id': self.state_id.id,
            'website': self.website,
            'is_company': is_company,
            'type': 'contact'
        }
        # propagate the lead language only when one is set
        if self.lang_id:
            res['lang'] = self.lang_id.code
        return res
+
+ def _find_matching_partner(self, email_only=False):
+ """ Try to find a matching partner with available information on the
+ lead, using notably customer's name, email, ...
+
+ :param email_only: Only find a matching based on the email. To use
+ for automatic process where ilike based on name can be too dangerous
+ :return: partner browse record
+ """
+ self.ensure_one()
+ partner = self.partner_id
+
+ if not partner and self.email_from:
+ partner = self.env['res.partner'].search([('email', '=', self.email_from)], limit=1)
+
+ if not partner and not email_only:
+ # search through the existing partners based on the lead's partner or contact name
+ # to be aligned with _create_customer, search on lead's name as last possibility
+ for customer_potential_name in [self[field_name] for field_name in ['partner_name', 'contact_name', 'name'] if self[field_name]]:
+ partner = self.env['res.partner'].search([('name', 'ilike', '%' + customer_potential_name + '%')], limit=1)
+ if partner:
+ break
+
+ return partner
+
+ def handle_partner_assignment(self, force_partner_id=False, create_missing=True):
+ """ Update customer (partner_id) of leads. Purpose is to set the same
+ partner on most leads; either through a newly created partner either
+ through a given partner_id.
+
+ :param int force_partner_id: if set, update all leads to that customer;
+ :param create_missing: for leads without customer, create a new one
+ based on lead information;
+ """
+ for lead in self:
+ if force_partner_id:
+ lead.partner_id = force_partner_id
+ if not lead.partner_id and create_missing:
+ partner = lead._create_customer()
+ lead.partner_id = partner.id
+
+ def handle_salesmen_assignment(self, user_ids=None, team_id=False):
+ """ Assign salesmen and salesteam to a batch of leads. If there are more
+ leads than salesmen, these salesmen will be assigned in round-robin. E.g.
+ 4 salesmen (S1, S2, S3, S4) for 6 leads (L1, L2, ... L6) will assigned as
+ following: L1 - S1, L2 - S2, L3 - S3, L4 - S4, L5 - S1, L6 - S2.
+
+ :param list user_ids: salesmen to assign
+ :param int team_id: salesteam to assign
+ """
+ update_vals = {'team_id': team_id} if team_id else {}
+ if not user_ids:
+ self.write(update_vals)
+ else:
+ lead_ids = self.ids
+ steps = len(user_ids)
+ # pass 1 : lead_ids[0:6:3] = [L1,L4]
+ # pass 2 : lead_ids[1:6:3] = [L2,L5]
+ # pass 3 : lead_ids[2:6:3] = [L3,L6]
+ # ...
+ for idx in range(0, steps):
+ subset_ids = lead_ids[idx:len(lead_ids):steps]
+ update_vals['user_id'] = user_ids[idx]
+ self.env['crm.lead'].browse(subset_ids).write(update_vals)
+
+ # ------------------------------------------------------------
+ # TOOLS
+ # ------------------------------------------------------------
+
+ def redirect_lead_opportunity_view(self):
+ self.ensure_one()
+ return {
+ 'name': _('Lead or Opportunity'),
+ 'view_mode': 'form',
+ 'res_model': 'crm.lead',
+ 'domain': [('type', '=', self.type)],
+ 'res_id': self.id,
+ 'view_id': False,
+ 'type': 'ir.actions.act_window',
+ 'context': {'default_type': self.type}
+ }
+
+ @api.model
+ def get_empty_list_help(self, help):
+ help_title, sub_title = "", ""
+ if self._context.get('default_type') == 'lead':
+ help_title = _('Create a new lead')
+ else:
+ help_title = _('Create an opportunity to start playing with your pipeline.')
+ alias_record = self.env['mail.alias'].search([
+ ('alias_name', '!=', False),
+ ('alias_name', '!=', ''),
+ ('alias_model_id.model', '=', 'crm.lead'),
+ ('alias_parent_model_id.model', '=', 'crm.team'),
+ ('alias_force_thread_id', '=', False)
+ ], limit=1)
+ if alias_record and alias_record.alias_domain and alias_record.alias_name:
+ email = '%s@%s' % (alias_record.alias_name, alias_record.alias_domain)
+ email_link = "<b><a href='mailto:%s'>%s</a></b>" % (email, email)
+ sub_title = _('Use the top left <i>Create</i> button, or send an email to %s to test the email gateway.') % (email_link)
+ return '<p class="o_view_nocontent_smiling_face">%s</p><p class="oe_view_nocontent_alias">%s</p>' % (help_title, sub_title)
+
+ # ------------------------------------------------------------
+ # MAILING
+ # ------------------------------------------------------------
+
    def _creation_subtype(self):
        # Track lead creation with the dedicated "Lead Created" mail subtype.
        return self.env.ref('crm.mt_lead_create')
+
+ def _track_subtype(self, init_values):
+ self.ensure_one()
+ if 'stage_id' in init_values and self.probability == 100 and self.stage_id:
+ return self.env.ref('crm.mt_lead_won')
+ elif 'lost_reason' in init_values and self.lost_reason:
+ return self.env.ref('crm.mt_lead_lost')
+ elif 'stage_id' in init_values:
+ return self.env.ref('crm.mt_lead_stage')
+ elif 'active' in init_values and self.active:
+ return self.env.ref('crm.mt_lead_restored')
+ elif 'active' in init_values and not self.active:
+ return self.env.ref('crm.mt_lead_lost')
+ return super(Lead, self)._track_subtype(init_values)
+
    def _notify_get_groups(self, msg_vals=None):
        """ Handle salesman recipients that can convert leads into opportunities
        and set opportunities as won / lost.

        :param msg_vals: optional dict of message values used to build action links
        :return: notification groups list, with a salesman group prepended
        """
        groups = super(Lead, self)._notify_get_groups(msg_vals=msg_vals)
        local_msg_vals = dict(msg_vals or {})

        self.ensure_one()
        # leads get a "convert" shortcut; opportunities get won / lost shortcuts
        if self.type == 'lead':
            convert_action = self._notify_get_action_link('controller', controller='/lead/convert', **local_msg_vals)
            salesman_actions = [{'url': convert_action, 'title': _('Convert to opportunity')}]
        else:
            won_action = self._notify_get_action_link('controller', controller='/lead/case_mark_won', **local_msg_vals)
            lost_action = self._notify_get_action_link('controller', controller='/lead/case_mark_lost', **local_msg_vals)
            salesman_actions = [
                {'url': won_action, 'title': _('Won')},
                {'url': lost_action, 'title': _('Lost')}]

        # extra shortcut towards the team configuration when a team is set
        if self.team_id:
            custom_params = dict(local_msg_vals, res_id=self.team_id.id, model=self.team_id._name)
            salesman_actions.append({
                'url': self._notify_get_action_link('view', **custom_params),
                'title': _('Sales Team Settings')
            })

        salesman_group_id = self.env.ref('sales_team.group_sale_salesman').id
        # (name, recipient-filter, parameters) tuple expected by mail.thread
        new_group = (
            'group_sale_salesman', lambda pdata: pdata['type'] == 'user' and salesman_group_id in pdata['groups'], {
                'actions': salesman_actions,
            })

        return [new_group] + groups
+
    def _notify_get_reply_to(self, default=None, records=None, company=None, doc_names=None):
        """ Override to set alias of lead and opportunities to their sales team if any. """
        # NOTE(review): records / doc_names are deliberately reset to None in the
        # team call -- presumably because they describe leads, not teams; confirm
        aliases = self.mapped('team_id').sudo()._notify_get_reply_to(default=default, records=None, company=company, doc_names=None)
        res = {lead.id: aliases.get(lead.team_id.id) for lead in self}
        # leads without a team fall back on the standard behavior
        leftover = self.filtered(lambda rec: not rec.team_id)
        if leftover:
            res.update(super(Lead, leftover)._notify_get_reply_to(default=default, records=None, company=company, doc_names=doc_names))
        return res
+
+ def _message_get_default_recipients(self):
+ return {r.id: {
+ 'partner_ids': [],
+ 'email_to': r.email_normalized,
+ 'email_cc': False}
+ for r in self}
+
+ def _message_get_suggested_recipients(self):
+ recipients = super(Lead, self)._message_get_suggested_recipients()
+ try:
+ for lead in self:
+ if lead.partner_id:
+ lead._message_add_suggested_recipient(recipients, partner=lead.partner_id, reason=_('Customer'))
+ elif lead.email_from:
+ lead._message_add_suggested_recipient(recipients, email=lead.email_from, reason=_('Customer Email'))
+ except AccessError: # no read access rights -> just ignore suggested recipients because this imply modifying followers
+ pass
+ return recipients
+
    @api.model
    def message_new(self, msg_dict, custom_values=None):
        """ Overrides mail_thread message_new that is called by the mailgateway
        through message_process.
        This override updates the document according to the email.

        :param msg_dict: parsed email values (subject, from, author_id, priority, ...)
        :param custom_values: optional field values overriding the email-derived defaults
        :return: the newly created crm.lead record
        """

        # remove external users
        if self.env.user.has_group('base.group_portal'):
            self = self.with_context(default_user_id=False)

        # remove default author when going through the mail gateway. Indeed we
        # do not want to explicitly set user_id to False; however we do not
        # want the gateway user to be responsible if no other responsible is
        # found.
        if self._uid == self.env.ref('base.user_root').id:
            self = self.with_context(default_user_id=False)

        if custom_values is None:
            custom_values = {}
        # email-derived defaults, overridable by custom_values below
        defaults = {
            'name': msg_dict.get('subject') or _("No Subject"),
            'email_from': msg_dict.get('from'),
            'partner_id': msg_dict.get('author_id', False),
        }
        # only accept priorities that exist in the CRM selection
        if msg_dict.get('priority') in dict(crm_stage.AVAILABLE_PRIORITIES):
            defaults['priority'] = msg_dict.get('priority')
        defaults.update(custom_values)

        # assign right company
        if 'company_id' not in defaults and 'team_id' in defaults:
            defaults['company_id'] = self.env['crm.team'].browse(defaults['team_id']).company_id.id
        return super(Lead, self).message_new(msg_dict, custom_values=defaults)
+
    def _message_post_after_hook(self, message, msg_vals):
        """ After posting: bind a partner created through the chatter to every
        orphan lead (no customer yet) sharing the same email address. """
        if self.email_from and not self.partner_id:
            # we consider that posting a message with a specified recipient (not a follower, a specific one)
            # on a document without customer means that it was created through the chatter using
            # suggested recipients. This heuristic allows to avoid ugly hacks in JS.
            new_partner = message.partner_ids.filtered(lambda partner: partner.email == self.email_from)
            if new_partner:
                # propagate the partner to all unfolded leads with the same
                # email and still no customer (not only self)
                self.search([
                    ('partner_id', '=', False),
                    ('email_from', '=', new_partner.email),
                    ('stage_id.fold', '=', False)]).write({'partner_id': new_partner.id})
        return super(Lead, self)._message_post_after_hook(message, msg_vals)
+
    def _message_partner_info_from_emails(self, emails, link_mail=False):
        """ Complete the partner info computed from emails: when an email
        matches the lead's email_from and no partner was found, use the lead's
        contact/partner name for a nicer display name.

        NOTE(review): reads self.partner_name / self.contact_name / self.email_from
        directly, so this presumably expects a single-record recordset — confirm
        with callers.
        """
        result = super(Lead, self)._message_partner_info_from_emails(emails, link_mail=link_mail)
        for partner_info in result:
            if not partner_info.get('partner_id') and (self.partner_name or self.contact_name):
                # extract the first email address from the computed full name
                emails = email_re.findall(partner_info['full_name'] or '')
                email = emails and emails[0] or ''
                if email and self.email_from and email.lower() == self.email_from.lower():
                    partner_info['full_name'] = tools.formataddr((self.contact_name or self.partner_name, email))
                    break
        return result
+
    def _phone_get_number_fields(self):
        """ Use mobile or phone fields to compute sanitized phone number.

        NOTE(review): list order presumably gives 'mobile' precedence over
        'phone' in the phone mixin — confirm there. """
        return ['mobile', 'phone']
+
    @api.model
    def get_import_templates(self):
        """ Return the XLS import template proposed in the import UI for leads
        and opportunities. """
        return [{
            'label': _('Import Template for Leads & Opportunities'),
            'template': '/crm/static/xls/crm_lead.xls'
        }]
+
+ # ------------------------------------------------------------
+ # PLS
+ # ------------------------------------------------------------
+ # Predictive lead scoring is computing the lead probability, based on won and lost leads from the past
+ # Each won/lost lead increments a frequency table, where we store, for each field/value couple, the number of
+ # won and lost leads.
+ # E.g. : A won lead from Belgium will increase the won count of the frequency country_id='Belgium' by 1.
+ # The frequencies are split by team_id, so each team has his own frequencies environment. (Team A doesn't impact B)
+ # There are two main ways to build the frequency table:
+ # - Live Increment: At each Won/lost, we increment directly the frequencies based on the lead values.
+ # Done right BEFORE writing the lead as won or lost.
+ # We consider a lead that will be marked as won or lost.
+ # Used each time a lead is won or lost, to ensure frequency table is always up to date
+ # - One shot Rebuild: empty the frequency table and rebuild it from scratch, based on every already won/lost leads
+ # Done during cron process.
+ # We consider all the leads that have been already won or lost.
+ # Used in one shot, when modifying the criteria to take into account (fields or reference date)
+
+ # ---------------------------------
+ # PLS: Probability Computation
+ # ---------------------------------
    def _pls_get_naive_bayes_probabilities(self, batch_mode=False):
        """
        In machine learning, naive Bayes classifiers (NBC) are a family of simple "probabilistic classifiers" based on
        applying Bayes theorem with strong (naive) independence assumptions between the variables taken into account.
        E.g: will TDE eat m&m's depending on his sleep status, the amount of work he has and the fullness of his stomach?
        As we use experience to compute the statistics, every day, we will register the variables state + the result.
        As the days pass, we will be able to determine, with more and more precision, if TDE will eat m&m's
        for a specific combination :
            - did sleep very well, a lot of work and stomach full > Will never happen !
            - didn't sleep at all, no work at all and empty stomach > for sure !
        Following Bayes' Theorem: the probability that an event occurs (to win) under certain conditions is proportional
        to the probability to win under each condition separately and the probability to win. We compute a 'Win score'
        -> P(Won | A∩B) ∝ P(A∩B | Won)*P(Won) OR S(Won | A∩B) = P(A∩B | Won)*P(Won)
        To compute a percentage of probability to win, we also compute the 'Lost score' that is proportional to the
        probability to lose under each condition separately and the probability to lose.
        -> Probability = S(Won | A∩B) / ( S(Won | A∩B) + S(Lost | A∩B) )
        See https://www.youtube.com/watch?v=CPqOCI0ahss can help to get a quick and simple example.
        One issue about NBC is when an event occurrence is never observed.
        E.g: if when TDE has an empty stomach, he always eats m&m's, then the "not eating m&m's when empty stomach" event
        will never be observed.
        This is called 'zero frequency' and that leads to division (or at least multiplication) by zero.
        To avoid this, we add 0.1 in each frequency. With few data, the computation is then not really realistic.
        The more we have records to analyse, the more the estimation will be precise.

        :param batch_mode: when True, restrict the computation to active leads of self
            that are still open (probability unset or strictly between 0 and 100)
        :return: probability in percent (and integer rounded) that the lead will be won at the current stage.
        """
        lead_probabilities = {}
        if not self:
            return lead_probabilities

        # Get all leads values, no matter the team_id
        domain = []
        if batch_mode:
            domain = [
                '&',
                ('active', '=', True), ('id', 'in', self.ids),
                '|',
                ('probability', '=', None),
                '&',
                ('probability', '<', 100), ('probability', '>', 0)
            ]
        leads_values_dict = self._pls_get_lead_pls_values(domain=domain)

        if not leads_values_dict:
            return lead_probabilities

        # Get unique couples to search in frequency table and won leads.
        leads_fields = set()  # keep unique fields, as a lead can have multiple tag_ids
        won_leads = set()
        won_stage_ids = self.env['crm.stage'].search([('is_won', '=', True)]).ids
        for lead_id, values in leads_values_dict.items():
            for field, value in values['values']:
                if field == 'stage_id' and value in won_stage_ids:
                    won_leads.add(lead_id)
                leads_fields.add(field)

        # get all variable related records from frequency table, no matter the team_id
        frequencies = self.env['crm.lead.scoring.frequency'].search([('variable', 'in', list(leads_fields))], order="team_id asc")

        # get all team_ids from frequencies
        frequency_teams = frequencies.mapped('team_id')
        frequency_team_ids = [0] + [team.id for team in frequency_teams]

        # 1. Compute each variable value count individually
        # regroup each variable to be able to compute their own probabilities
        # As all the variable does not enter into account (as we reject unset values in the process)
        # each value probability must be computed only with their own variable related total count
        # special case: for lead for which team_id is not in frequency table,
        # we consider all the records, independently from team_id (this is why we add a result[-1])
        result = dict((team_id, dict((field, dict(won_total=0, lost_total=0)) for field in leads_fields)) for team_id in frequency_team_ids)
        result[-1] = dict((field, dict(won_total=0, lost_total=0)) for field in leads_fields)
        for frequency in frequencies:
            # team-less frequencies are stored under key 0
            team_result = result[frequency.team_id.id if frequency.team_id else 0]

            field = frequency['variable']
            value = frequency['value']

            # To avoid that a tag takes too much importance if its subset is too small,
            # we ignore the tag frequencies if we have less than 50 won or lost for this tag.
            if field == 'tag_id' and (frequency['won_count'] + frequency['lost_count']) < 50:
                continue

            team_result[field][value] = {'won': frequency['won_count'], 'lost': frequency['lost_count']}
            team_result[field]['won_total'] += frequency['won_count']
            team_result[field]['lost_total'] += frequency['lost_count']

            # aggregate the same couple into the team-agnostic bucket result[-1]
            if value not in result[-1][field]:
                result[-1][field][value] = {'won': 0, 'lost': 0}
            result[-1][field][value]['won'] += frequency['won_count']
            result[-1][field][value]['lost'] += frequency['lost_count']
            result[-1][field]['won_total'] += frequency['won_count']
            result[-1][field]['lost_total'] += frequency['lost_count']

        # Get all won, lost and total count for all records in frequencies per team_id
        for team_id in result:
            result[team_id]['team_won'], \
            result[team_id]['team_lost'], \
            result[team_id]['team_total'] = self._pls_get_won_lost_total_count(result[team_id])

        save_team_id = None
        p_won, p_lost = 1, 1
        for lead_id, lead_values in leads_values_dict.items():
            # if stage_id is null, return 0 and bypass computation
            lead_fields = [value[0] for value in lead_values.get('values', [])]
            if not 'stage_id' in lead_fields:
                lead_probabilities[lead_id] = 0
                continue
            # if lead stage is won, return 100
            elif lead_id in won_leads:
                lead_probabilities[lead_id] = 100
                continue

            lead_team_id = lead_values['team_id'] if lead_values['team_id'] else 0  # team_id = None -> Convert to 0
            lead_team_id = lead_team_id if lead_team_id in result else -1  # team_id not in frequency Table -> convert to -1
            # leads are sorted by team_id: recompute team-level ratios only on change
            if lead_team_id != save_team_id:
                save_team_id = lead_team_id
                team_won = result[save_team_id]['team_won']
                team_lost = result[save_team_id]['team_lost']
                team_total = result[save_team_id]['team_total']
                # if one count = 0, we cannot compute lead probability
                if not team_won or not team_lost:
                    continue
                p_won = team_won / team_total
                p_lost = team_lost / team_total

            # 2. Compute won and lost score using each variable's individual probability
            s_lead_won, s_lead_lost = p_won, p_lost
            for field, value in lead_values['values']:
                field_result = result.get(save_team_id, {}).get(field)
                value = value.origin if hasattr(value, 'origin') else value
                value_result = field_result.get(str(value)) if field_result else False
                if value_result:
                    # stage frequencies are normalized by the team totals, not the field totals
                    total_won = team_won if field == 'stage_id' else field_result['won_total']
                    total_lost = team_lost if field == 'stage_id' else field_result['lost_total']

                    s_lead_won *= value_result['won'] / total_won
                    s_lead_lost *= value_result['lost'] / total_lost

            # 3. Compute Probability to win
            lead_probabilities[lead_id] = round(100 * s_lead_won / (s_lead_won + s_lead_lost), 2)
        return lead_probabilities
+
+ # ---------------------------------
+ # PLS: Live Increment
+ # ---------------------------------
    def _pls_increment_frequencies(self, from_state=None, to_state=None):
        """
        When losing or winning a lead, this method is called to increment each PLS parameter related to the lead
        in won_count (if won) or in lost_count (if lost).

        This method is also used when reactivating a mistakenly lost lead (using the decrement argument).
        In this case, the lost count should be decremented by 1 for each PLS parameter linked to the lead.

        Live increment must be done before writing the new values because we need to know the state change (from and to).
        This would not be an issue for the reach won or reach lost as we just need to increment the frequencies with the
        final state of the lead.
        The issue is when the lead leaves a closed state because once the new values have been written, we do not know
        what was the previous state that we need to decrement.
        This is why 'is_won' and 'decrement' parameters are used to describe the from / to change of his state.

        :param from_state: 'won'/'lost' state the lead is leaving (frequencies get decremented)
        :param to_state: 'won'/'lost' state the lead is reaching (frequencies get incremented)
        """
        new_frequencies_by_team, existing_frequencies_by_team = self._pls_prepare_update_frequency_table(target_state=from_state or to_state)

        # update frequency table: +1 when reaching a closed state, -1 when leaving one
        self._pls_update_frequency_table(new_frequencies_by_team, 1 if to_state else -1,
                                         existing_frequencies_by_team=existing_frequencies_by_team)
+
+ # ---------------------------------
+ # PLS: One shot rebuild
+ # ---------------------------------
+ def _cron_update_automated_probabilities(self):
+ """ This cron will :
+ - rebuild the lead scoring frequency table
+ - recompute all the automated_probability and align probability if both were aligned
+ """
+ cron_start_date = datetime.now()
+ self._rebuild_pls_frequency_table()
+ self._update_automated_probabilities()
+ _logger.info("Predictive Lead Scoring : Cron duration = %d seconds" % ((datetime.now() - cron_start_date).total_seconds()))
+
    def _rebuild_pls_frequency_table(self):
        """ Empty the scoring frequency table and rebuild it from scratch based
        on every closed (won / lost) lead in database.

        :raise UserError: when the current user lacks unlink rights on leads
        """
        # Clear the frequencies table (in sql to speed up the cron)
        try:
            # raw TRUNCATE bypasses ORM security -> check unlink right explicitly first
            self.check_access_rights('unlink')
        except AccessError:
            raise UserError(_("You don't have the access needed to run this cron."))
        else:
            self._cr.execute('TRUNCATE TABLE crm_lead_scoring_frequency')

        new_frequencies_by_team, unused = self._pls_prepare_update_frequency_table(rebuild=True)
        # update frequency table
        self._pls_update_frequency_table(new_frequencies_by_team, 1)

        _logger.info("Predictive Lead Scoring : crm.lead.scoring.frequency table rebuilt")
+
+ def _update_automated_probabilities(self):
+ """ Recompute all the automated_probability (and align probability if both were aligned) for all the leads
+ that are active (not won, nor lost).
+
+ For performance matter, as there can be a huge amount of leads to recompute, this cron proceed by batch.
+ Each batch is performed into its own transaction, in order to minimise the lock time on the lead table
+ (and to avoid complete lock if there was only 1 transaction that would last for too long -> several minutes).
+ If a concurrent update occurs, it will simply be put in the queue to get the lock.
+ """
+ pls_start_date = self._pls_get_safe_start_date()
+ if not pls_start_date:
+ return
+
+ # 1. Get all the leads to recompute created after pls_start_date that are nor won nor lost
+ # (Won : probability = 100 | Lost : probability = 0 or inactive. Here, inactive won't be returned anyway)
+ # Get also all the lead without probability --> These are the new leads. Activate auto probability on them.
+ pending_lead_domain = [
+ '&',
+ '&',
+ ('stage_id', '!=', False), ('create_date', '>=', pls_start_date),
+ '|',
+ ('probability', '=', False),
+ '&',
+ ('probability', '<', 100), ('probability', '>', 0)
+ ]
+ leads_to_update = self.env['crm.lead'].search(pending_lead_domain)
+ leads_to_update_count = len(leads_to_update)
+
+ # 2. Compute by batch to avoid memory error
+ lead_probabilities = {}
+ for i in range(0, leads_to_update_count, PLS_COMPUTE_BATCH_STEP):
+ leads_to_update_part = leads_to_update[i:i + PLS_COMPUTE_BATCH_STEP]
+ lead_probabilities.update(leads_to_update_part._pls_get_naive_bayes_probabilities(batch_mode=True))
+ _logger.info("Predictive Lead Scoring : New automated probabilities computed")
+
+ # 3. Group by new probability to reduce server roundtrips when executing the update
+ probability_leads = defaultdict(list)
+ for lead_id, probability in sorted(lead_probabilities.items()):
+ probability_leads[probability].append(lead_id)
+
+ # 4. Update automated_probability (+ probability if both were equal)
+ update_sql = """UPDATE crm_lead
+ SET automated_probability = %s,
+ probability = CASE WHEN (probability = automated_probability OR probability is null)
+ THEN (%s)
+ ELSE (probability)
+ END
+ WHERE id in %s"""
+
+ # Update by a maximum number of leads at the same time, one batch by transaction :
+ # - avoid memory errors
+ # - avoid blocking the table for too long with a too big transaction
+ transactions_count, transactions_failed_count = 0, 0
+ cron_update_lead_start_date = datetime.now()
+ auto_commit = not getattr(threading.currentThread(), 'testing', False)
+ for probability, probability_lead_ids in probability_leads.items():
+ for lead_ids_current in tools.split_every(PLS_UPDATE_BATCH_STEP, probability_lead_ids):
+ transactions_count += 1
+ try:
+ self.env.cr.execute(update_sql, (probability, probability, tuple(lead_ids_current)))
+ # auto-commit except in testing mode
+ if auto_commit:
+ self.env.cr.commit()
+ except Exception as e:
+ _logger.warning("Predictive Lead Scoring : update transaction failed. Error: %s" % e)
+ transactions_failed_count += 1
+
+ _logger.info(
+ "Predictive Lead Scoring : All automated probabilities updated (%d leads / %d transactions (%d failed) / %d seconds)" % (
+ leads_to_update_count,
+ transactions_count,
+ transactions_failed_count,
+ (datetime.now() - cron_update_lead_start_date).total_seconds(),
+ )
+ )
+
+ # ---------------------------------
+ # PLS: Common parts for both mode
+ # ---------------------------------
    def _pls_prepare_update_frequency_table(self, rebuild=False, target_state=False):
        """
        This method is common to Live Increment or Full Rebuild mode, as it shares the main steps.
        This method will prepare the frequency dict needed to update the frequency table:
            - New frequencies: frequencies that we need to add in the frequency table.
            - Existing frequencies: frequencies that are already in the frequency table.
        In rebuild mode, only the new frequencies are needed as existing frequencies are truncated.
        For each team, each dict contains the frequency in won and lost for each field/value couple
        of the target leads.
        Target leads are :
            - in Live increment mode : given ongoing leads (self)
            - in Full rebuild mode : all the closed (won and lost) leads in the DB.
        During the frequencies update, with both new and existing frequencies, we can split frequencies to update
        and frequencies to add. If a field/value couple already exists in the frequency table, we just update it.
        Otherwise, we need to insert a new one.

        :param rebuild: True to consider every closed lead in DB instead of self
        :param target_state: 'won'/'lost' state the leads of self are reaching/leaving (live increment only)
        :return: (new_frequencies_by_team, existing_frequencies_by_team) dicts keyed by team_id
        """
        # Keep eligible leads
        pls_start_date = self._pls_get_safe_start_date()
        if not pls_start_date:
            return {}, {}

        if rebuild:  # rebuild will treat every closed lead in DB, increment will treat current ongoing leads
            pls_leads = self
        else:
            # Only treat leads created after the PLS start Date
            pls_leads = self.filtered(
                lambda lead: fields.Date.to_date(pls_start_date) <= fields.Date.to_date(lead.create_date))
            if not pls_leads:
                return {}, {}

        # Extract target leads values
        if rebuild:  # rebuild is ok
            domain = [
                '&',
                ('create_date', '>=', pls_start_date),
                '|',
                ('probability', '=', 100),
                '&',
                ('probability', '=', 0), ('active', '=', False)
            ]
            team_ids = self.env['crm.team'].with_context(active_test=False).search([]).ids + [0]  # If team_id is unset, consider it as team 0
        else:  # increment
            domain = [('id', 'in', pls_leads.ids)]
            team_ids = pls_leads.mapped('team_id').ids + [0]

        leads_values_dict = pls_leads._pls_get_lead_pls_values(domain=domain)

        # split leads values by team_id
        # get current frequencies related to the target leads
        leads_frequency_values_by_team = dict((team_id, []) for team_id in team_ids)
        leads_pls_fields = set()  # ensure to keep each field unique (can have multiple tag_id leads_values_dict)
        for lead_id, values in leads_values_dict.items():
            team_id = values.get('team_id', 0)  # If team_id is unset, consider it as team 0
            lead_frequency_values = {'count': 1}
            for field, value in values['values']:
                if field != "probability":  # was added to lead values in batch mode to know won/lost state, but is not a pls fields.
                    leads_pls_fields.add(field)
                else:  # extract lead probability - needed to increment tag_id frequency. (proba always before tag_id)
                    lead_probability = value
                if field == 'tag_id':  # handle tag_id separately (as in One Shot rebuild mode)
                    leads_frequency_values_by_team[team_id].append({field: value, 'count': 1, 'probability': lead_probability})
                else:
                    lead_frequency_values[field] = value
            leads_frequency_values_by_team[team_id].append(lead_frequency_values)
        leads_pls_fields = list(leads_pls_fields)

        # get new frequencies
        new_frequencies_by_team = {}
        for team_id in team_ids:
            # prepare fields and tag values for leads by team
            new_frequencies_by_team[team_id] = self._pls_prepare_frequencies(
                leads_frequency_values_by_team[team_id], leads_pls_fields, target_state=target_state)

        # get existing frequencies
        existing_frequencies_by_team = {}
        if not rebuild:  # there is no existing frequency in rebuild mode as they were all deleted.
            # read all fields to get everything in memory in one query (instead of having query + prefetch)
            existing_frequencies = self.env['crm.lead.scoring.frequency'].search_read(
                ['&', ('variable', 'in', leads_pls_fields),
                 '|', ('team_id', 'in', pls_leads.mapped('team_id').ids), ('team_id', '=', False)])
            for frequency in existing_frequencies:
                team_id = frequency['team_id'][0] if frequency.get('team_id') else 0
                if team_id not in existing_frequencies_by_team:
                    existing_frequencies_by_team[team_id] = dict((field, {}) for field in leads_pls_fields)

                existing_frequencies_by_team[team_id][frequency['variable']][frequency['value']] = {
                    'frequency_id': frequency['id'],
                    'won': frequency['won_count'],
                    'lost': frequency['lost_count']
                }

        return new_frequencies_by_team, existing_frequencies_by_team
+
    def _pls_update_frequency_table(self, new_frequencies_by_team, step, existing_frequencies_by_team=None):
        """ Create / update the frequency table in a cross company way, per team_id

        :param new_frequencies_by_team: {team_id: {field: {value: {'won': x, 'lost': y}}}}
        :param step: +1 to increment the stored counters, -1 to decrement them
        :param existing_frequencies_by_team: same structure plus a 'frequency_id'
            per couple; couples found here are updated, the others are created
        """
        values_to_update = {}
        values_to_create = []
        if not existing_frequencies_by_team:
            existing_frequencies_by_team = {}
        # build the create multi + frequencies to update
        for team_id, new_frequencies in new_frequencies_by_team.items():
            for field, value in new_frequencies.items():
                # frequency already present ?
                current_frequencies = existing_frequencies_by_team.get(team_id, {})
                for param, result in value.items():
                    current_frequency_for_couple = current_frequencies.get(field, {}).get(param, {})
                    # If frequency already present : UPDATE IT
                    if current_frequency_for_couple:
                        new_won = current_frequency_for_couple['won'] + (result['won'] * step)
                        new_lost = current_frequency_for_couple['lost'] + (result['lost'] * step)
                        # ensure to have always positive frequencies
                        values_to_update[current_frequency_for_couple['frequency_id']] = {
                            'won_count': new_won if new_won > 0 else 0.1,
                            'lost_count': new_lost if new_lost > 0 else 0.1
                        }
                        continue

                    # Else, CREATE a new frequency record.
                    # We add + 0.1 in won and lost counts to avoid zero frequency issues
                    # should be +1 but it weights too much on small recordset.
                    values_to_create.append({
                        'variable': field,
                        'value': param,
                        'won_count': result['won'] + 0.1,
                        'lost_count': result['lost'] + 0.1,
                        'team_id': team_id if team_id else None  # team_id = 0 means no team_id
                    })

        # sudo: scoring frequencies are technical data, written regardless of user ACLs
        LeadScoringFrequency = self.env['crm.lead.scoring.frequency'].sudo()
        for frequency_id, values in values_to_update.items():
            LeadScoringFrequency.browse(frequency_id).write(values)

        if values_to_create:
            LeadScoringFrequency.create(values_to_create)
+
+ # ---------------------------------
+ # Utility Tools for PLS
+ # ---------------------------------
+
+ # PLS: Config Parameters
+ # ---------------------
    def _pls_get_safe_start_date(self):
        """ As config_parameters does not accept Date field,
        we get directly the date formatted string stored into the Char config field,
        as we directly use this string in the sql queries.
        To avoid sql injections when using this config param,
        we ensure the date string can be effectively a date.

        :return: the 'crm.pls_start_date' parameter as a string, or False when
            it is unset or not a valid date
        """
        str_date = self.env['ir.config_parameter'].sudo().get_param('crm.pls_start_date')
        if not fields.Date.to_date(str_date):
            return False
        return str_date
+
+ def _pls_get_safe_fields(self):
+ """ As config_parameters does not accept M2M field,
+ we the fields from the formated string stored into the Char config field.
+ To avoid sql injections when using that list, we return only the fields
+ that are defined on the model. """
+ pls_fields_config = self.env['ir.config_parameter'].sudo().get_param('crm.pls_fields')
+ pls_fields = pls_fields_config.split(',') if pls_fields_config else []
+ pls_safe_fields = [field for field in pls_fields if field in self._fields.keys()]
+ return pls_safe_fields
+
+ # Compute Automated Probability Tools
+ # -----------------------------------
    def _pls_get_won_lost_total_count(self, team_results):
        """ Get all won and all lost + total :
               first stage can be used to know how many lost and won there is
               as won count are equals for all stage
               and first stage is always incremented in lost_count

        :param team_results: one team's frequencies as built in
            _pls_get_naive_bayes_probabilities
            ({field: {value: {'won', 'lost'}, 'won_total', 'lost_total'}})
        :return: won count, lost count and total count for all records in frequencies
        """
        # TODO : check if we need to handle specific team_id stages [for lost count] (if first stage in sequence is team_specific)
        first_stage_id = self.env['crm.stage'].search([('team_id', '=', False)], order='sequence', limit=1)
        # frequency values are keyed by stringified ids
        if str(first_stage_id.id) not in team_results.get('stage_id', []):
            return 0, 0, 0
        stage_result = team_results['stage_id'][str(first_stage_id.id)]
        return stage_result['won'], stage_result['lost'], stage_result['won'] + stage_result['lost']
+
+ # PLS: Rebuild Frequency Table Tools
+ # ----------------------------------
    def _pls_prepare_frequencies(self, lead_values, leads_pls_fields, target_state=None):
        """new state is used when getting frequencies for leads that are changing to lost or won.
        Stays none if we are checking frequencies for leads already won or lost.

        :param lead_values: list of dicts, one per lead (or per lead tag), shaped
            {field: value, 'count': 1[, 'probability': p]}
        :param leads_pls_fields: list of lead fields to take into account
        :param target_state: 'won' or 'lost' when leads are being closed
        :return: {field: {str(value): {'won': count, 'lost': count}}}
        """
        # Frequencies must include tag_id
        pls_fields = set(leads_pls_fields + ['tag_id'])
        frequencies = dict((field, {}) for field in pls_fields)

        stage_ids = self.env['crm.stage'].search_read([], ['sequence', 'name', 'id'], order='sequence')
        stage_sequences = {stage['id']: stage['sequence'] for stage in stage_ids}

        # Increment won / lost frequencies by criteria (field / value couple)
        for values in lead_values:
            if target_state:  # ignore probability values if target state (as probability is the old value)
                won_count = values['count'] if target_state == 'won' else 0
                lost_count = values['count'] if target_state == 'lost' else 0
            else:
                won_count = values['count'] if values.get('probability', 0) == 100 else 0
                lost_count = values['count'] if values.get('probability', 1) == 0 else 0

            # tag entries come as dedicated dicts holding only tag_id (+ count/probability)
            if 'tag_id' in values:
                frequencies = self._pls_increment_frequency_dict(frequencies, 'tag_id', values['tag_id'], won_count, lost_count)
                continue

            # Else, treat other fields
            if 'tag_id' in pls_fields:  # tag_id already treated here above.
                pls_fields.remove('tag_id')
            for field in pls_fields:
                if field not in values:
                    continue
                value = values[field]
                # email_state / phone_state: False is a meaningful value and must be counted
                if value or field in ('email_state', 'phone_state'):
                    if field == 'stage_id':
                        if won_count:  # increment all stages if won
                            stages_to_increment = [stage['id'] for stage in stage_ids]
                        else:  # increment only current + previous stages if lost
                            current_stage_sequence = stage_sequences[value]
                            stages_to_increment = [stage['id'] for stage in stage_ids if stage['sequence'] <= current_stage_sequence]
                        for stage_id in stages_to_increment:
                            frequencies = self._pls_increment_frequency_dict(frequencies, field, stage_id, won_count, lost_count)
                    else:
                        frequencies = self._pls_increment_frequency_dict(frequencies, field, value, won_count, lost_count)

        return frequencies
+
+ def _pls_increment_frequency_dict(self, frequencies, field, value, won, lost):
+ value = str(value) # Ensure we will always compare strings.
+ if value not in frequencies[field]:
+ frequencies[field][value] = {'won': won, 'lost': lost}
+ else:
+ frequencies[field][value]['won'] += won
+ frequencies[field][value]['lost'] += lost
+ return frequencies
+
+ # Common PLS Tools
+ # ----------------
+ def _pls_get_lead_pls_values(self, domain=[]):
+ """
+ This methods builds a dict where, for each lead in self or matching the given domain,
+ we will get a list of field/value couple.
+ Due to onchange and create, we don't always have the id of the lead to recompute.
+ When we update few records (one, typically) with onchanges, we build the lead_values (= couple field/value)
+ using the ORM.
+ To speed up the computation and avoid making too much DB read inside loops,
+ we can give a domain to make sql queries to bypass the ORM.
+ This domain will be used in sql queries to get the values for every lead matching the domain.
+ :param domain: If set, we get all the leads values via unique sql queries (one for tags, one for other fields),
+ using the given domain on leads.
+ If not set, get lead values lead by lead using the ORM.
+ :return: {lead_id: [(field1: value1), (field2: value2), ...], ...}
+ """
+ leads_values_dict = OrderedDict()
+ pls_fields = ["stage_id", "team_id"] + self._pls_get_safe_fields()
+
+ if domain:
+ # active_test = False as domain should take active into 'active' field it self
+ from_clause, where_clause, where_params = self.env['crm.lead'].with_context(active_test=False)._where_calc(domain).get_sql()
+ str_fields = ", ".join(["{}"] * len(pls_fields))
+ args = [sql.Identifier(field) for field in pls_fields]
+
+ # Get leads values
+ self.flush(['probability'])
+ query = """SELECT id, probability, %s
+ FROM %s
+ WHERE %s order by team_id asc"""
+ query = sql.SQL(query % (str_fields, from_clause, where_clause)).format(*args)
+ self._cr.execute(query, where_params)
+ lead_results = self._cr.dictfetchall()
+
+ # Get tags values
+ query = """SELECT crm_lead.id as lead_id, t.id as tag_id
+ FROM %s
+ LEFT JOIN crm_tag_rel rel ON crm_lead.id = rel.lead_id
+ LEFT JOIN crm_tag t ON rel.tag_id = t.id
+ WHERE %s order by crm_lead.team_id asc"""
+ query = sql.SQL(query % (from_clause, where_clause)).format(*args)
+ self._cr.execute(query, where_params)
+ tag_results = self._cr.dictfetchall()
+
+ # get all (variable, value) couple for all in self
+ for lead in lead_results:
+ lead_values = []
+ for field in pls_fields + ['probability']: # add probability as used in _pls_prepare_frequencies (needed in rebuild mode)
+ value = lead[field]
+ if field == 'team_id': # ignore team_id as stored separately in leads_values_dict[lead_id][team_id]
+ continue
+ if value or field == 'probability': # 0 is a correct value for probability
+ lead_values.append((field, value))
+ elif field in ('email_state', 'phone_state'): # As ORM reads 'None' as 'False', do the same here
+ lead_values.append((field, False))
+ leads_values_dict[lead['id']] = {'values': lead_values, 'team_id': lead['team_id'] or 0}
+
+ for tag in tag_results:
+ if tag['tag_id']:
+ leads_values_dict[tag['lead_id']]['values'].append(('tag_id', tag['tag_id']))
+ return leads_values_dict
+ else:
+ for lead in self:
+ lead_values = []
+ for field in pls_fields:
+ if field == 'team_id': # ignore team_id as stored separately in leads_values_dict[lead_id][team_id]
+ continue
+ value = lead[field].id if isinstance(lead[field], models.BaseModel) else lead[field]
+ if value or field in ('email_state', 'phone_state'):
+ lead_values.append((field, value))
+ for tag in lead.tag_ids:
+ lead_values.append(('tag_id', tag.id))
+ leads_values_dict[lead.id] = {'values': lead_values, 'team_id': lead['team_id'].id}
+ return leads_values_dict
diff --git a/addons/crm/models/crm_lead_scoring_frequency.py b/addons/crm/models/crm_lead_scoring_frequency.py
new file mode 100644
index 00000000..85b5c77b
--- /dev/null
+++ b/addons/crm/models/crm_lead_scoring_frequency.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+from odoo import fields, models
+
+
+class LeadScoringFrequency(models.Model):
+    """Frequency table backing predictive lead scoring.
+
+    Each record counts how often a given (variable, value) couple was seen
+    on won and on lost leads, optionally restricted to one sales team.
+    """
+    _name = 'crm.lead.scoring.frequency'
+    _description = 'Lead Scoring Frequency'
+
+    # Technical name of the tracked lead field (e.g. 'tag_id', 'email_state'
+    # in the PLS code) — indexed as it is the main lookup key.
+    variable = fields.Char('Variable', index=True)
+    # Stringified value of that field for this frequency bucket.
+    value = fields.Char('Value')
+    won_count = fields.Float('Won Count', digits=(16, 1))  # Float because we add 0.1 to avoid zero Frequency issue
+    lost_count = fields.Float('Lost Count', digits=(16, 1))  # Float because we add 0.1 to avoid zero Frequency issue
+    # Empty team_id = frequency not tied to a specific sales team.
+    team_id = fields.Many2one('crm.team', 'Sales Team')
+
+
+class FrequencyField(models.Model):
+    """Whitelist of crm.lead fields usable in predictive lead scoring."""
+    _name = 'crm.lead.scoring.frequency.field'
+    _description = 'Fields that can be used for predictive lead scoring computation'
+
+    # Human-readable label taken from the underlying ir.model.fields record.
+    name = fields.Char(related="field_id.field_description")
+    # Only crm.lead fields are selectable; the record disappears with the field.
+    field_id = fields.Many2one(
+        'ir.model.fields', domain=[('model_id.model', '=', 'crm.lead')], required=True,
+        ondelete='cascade',
+    )
diff --git a/addons/crm/models/crm_lost_reason.py b/addons/crm/models/crm_lost_reason.py
new file mode 100644
index 00000000..4b848acb
--- /dev/null
+++ b/addons/crm/models/crm_lost_reason.py
@@ -0,0 +1,29 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import fields, models, _
+
+
+class LostReason(models.Model):
+ _name = "crm.lost.reason"
+ _description = 'Opp. Lost Reason'
+
+ name = fields.Char('Description', required=True, translate=True)
+ active = fields.Boolean('Active', default=True)
+ leads_count = fields.Integer('Leads Count', compute='_compute_leads_count')
+
+ def _compute_leads_count(self):
+ lead_data = self.env['crm.lead'].with_context(active_test=False).read_group([('lost_reason', 'in', self.ids)], ['lost_reason'], ['lost_reason'])
+ mapped_data = dict((data['lost_reason'][0], data['lost_reason_count']) for data in lead_data)
+ for reason in self:
+ reason.leads_count = mapped_data.get(reason.id, 0)
+
+ def action_lost_leads(self):
+ return {
+ 'name': _('Leads'),
+ 'view_mode': 'tree,form',
+ 'domain': [('lost_reason', 'in', self.ids)],
+ 'res_model': 'crm.lead',
+ 'type': 'ir.actions.act_window',
+ 'context': {'create': False, 'active_test': False},
+ }
diff --git a/addons/crm/models/crm_recurring_plan.py b/addons/crm/models/crm_recurring_plan.py
new file mode 100644
index 00000000..aa9e7f91
--- /dev/null
+++ b/addons/crm/models/crm_recurring_plan.py
@@ -0,0 +1,19 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import fields, models
+
+
+class RecurringPlan(models.Model):
+    """Revenue recurrence plan, expressed as a number of months."""
+    _name = "crm.recurring.plan"
+    _description = "CRM Recurring revenue plans"
+    _order = "sequence"
+
+    name = fields.Char('Plan Name', required=True, translate=True)
+    # Length of the recurrence period in months.
+    number_of_months = fields.Integer('# Months', required=True)
+    active = fields.Boolean('Active', default=True)
+    # Lower sequence is shown first (see _order).
+    sequence = fields.Integer('Sequence', default=10)
+
+    # Database-level guard: a plan cannot span a negative number of months.
+    _sql_constraints = [
+        ('check_number_of_months', 'CHECK(number_of_months >= 0)', 'The number of month can\'t be negative.'),
+    ]
diff --git a/addons/crm/models/crm_stage.py b/addons/crm/models/crm_stage.py
new file mode 100644
index 00000000..5cd1a793
--- /dev/null
+++ b/addons/crm/models/crm_stage.py
@@ -0,0 +1,49 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models
+
+# Priority selection values as (stored key, label) pairs, lowest to highest.
+AVAILABLE_PRIORITIES = [
+    ('0', 'Low'),
+    ('1', 'Medium'),
+    ('2', 'High'),
+    ('3', 'Very High'),
+]
+
+
+class Stage(models.Model):
+    """ Model for case stages. This models the main stages of a document
+    management flow. Main CRM objects (leads, opportunities, project
+    issues, ...) will now use only stages, instead of state and stages.
+    Stages are for example used to display the kanban view of records.
+    """
+    _name = "crm.stage"
+    _description = "CRM Stages"
+    _rec_name = 'name'
+    _order = "sequence, name, id"
+
+    @api.model
+    def default_get(self, fields):
+        """ Hack : when going from the pipeline, creating a stage with a sales team in
+        context should not create a stage for the current Sales Team only
+        """
+        ctx = dict(self.env.context)
+        if ctx.get('default_team_id') and not ctx.get('crm_team_mono'):
+            # 'crm_team_mono' presumably marks explicitly team-scoped creation;
+            # without it the default team is dropped so the stage stays shared
+            # across teams — TODO confirm which views set this key.
+            ctx.pop('default_team_id')
+        return super(Stage, self.with_context(ctx)).default_get(fields)
+
+    name = fields.Char('Stage Name', required=True, translate=True)
+    sequence = fields.Integer('Sequence', default=1, help="Used to order stages. Lower is better.")
+    is_won = fields.Boolean('Is Won Stage?')
+    requirements = fields.Text('Requirements', help="Enter here the internal requirements for this stage (ex: Offer sent to customer). It will appear as a tooltip over the stage's name.")
+    team_id = fields.Many2one('crm.team', string='Sales Team', ondelete='set null',
+        help='Specific team that uses this stage. Other teams will not be able to see or use this stage.')
+    fold = fields.Boolean('Folded in Pipeline',
+        help='This stage is folded in the kanban view when there are no records in that stage to display.')
+
+    # This field for interface only
+    team_count = fields.Integer('team_count', compute='_compute_team_count')
+
+    def _compute_team_count(self):
+        """Total number of sales teams; same value on every stage (UI helper)."""
+        for stage in self:
+            stage.team_count = self.env['crm.team'].search_count([])
diff --git a/addons/crm/models/crm_team.py b/addons/crm/models/crm_team.py
new file mode 100644
index 00000000..791755e9
--- /dev/null
+++ b/addons/crm/models/crm_team.py
@@ -0,0 +1,183 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+import ast
+import datetime
+
+from odoo import api, fields, models, _
+from odoo.tools.safe_eval import safe_eval
+
+
+class Team(models.Model):
+    """CRM extension of sales teams: lead/opportunity usage flags, the
+    incoming mail alias that creates leads, and pipeline statistics used
+    by the team dashboard.
+    """
+    _name = 'crm.team'
+    _inherit = ['mail.alias.mixin', 'crm.team']
+    _description = 'Sales Team'
+
+    use_leads = fields.Boolean('Leads', help="Check this box to filter and qualify incoming requests as leads before converting them into opportunities and assigning them to a salesperson.")
+    use_opportunities = fields.Boolean('Pipeline', default=True, help="Check this box to manage a presales process with opportunities.")
+    alias_id = fields.Many2one(
+        'mail.alias', string='Alias', ondelete="restrict", required=True,
+        help="The email address associated with this channel. New emails received will automatically create new leads assigned to the channel.")
+    # statistics about leads / opportunities / both
+    lead_unassigned_count = fields.Integer(
+        string='# Unassigned Leads', compute='_compute_lead_unassigned_count')
+    lead_all_assigned_month_count = fields.Integer(
+        string='# Leads/Opps assigned this month', compute='_compute_lead_all_assigned_month_count',
+        help="Number of leads and opportunities assigned this last month.")
+    opportunities_count = fields.Integer(
+        string='# Opportunities', compute='_compute_opportunities_data')
+    opportunities_amount = fields.Monetary(
+        string='Opportunities Revenues', compute='_compute_opportunities_data')
+    opportunities_overdue_count = fields.Integer(
+        string='# Overdue Opportunities', compute='_compute_opportunities_overdue_data')
+    opportunities_overdue_amount = fields.Monetary(
+        string='Overdue Opportunities Revenues', compute='_compute_opportunities_overdue_data',)
+    # alias: improve fields coming from _inherits, use inherited to avoid replacing them
+    alias_user_id = fields.Many2one(
+        'res.users', related='alias_id.alias_user_id', inherited=True,
+        domain=lambda self: [('groups_id', 'in', self.env.ref('sales_team.group_sale_salesman_all_leads').id)])
+
+    def _compute_lead_unassigned_count(self):
+        """Count leads (type 'lead') without a salesperson, per team."""
+        leads_data = self.env['crm.lead'].read_group([
+            ('team_id', 'in', self.ids),
+            ('type', '=', 'lead'),
+            ('user_id', '=', False),
+        ], ['team_id'], ['team_id'])
+        counts = {datum['team_id'][0]: datum['team_id_count'] for datum in leads_data}
+        for team in self:
+            team.lead_unassigned_count = counts.get(team.id, 0)
+
+    def _compute_lead_all_assigned_month_count(self):
+        """Count leads/opps assigned to a salesperson in the last 30 days."""
+        limit_date = datetime.datetime.now() - datetime.timedelta(days=30)
+        leads_data = self.env['crm.lead'].read_group([
+            ('team_id', 'in', self.ids),
+            ('date_open', '>=', fields.Datetime.to_string(limit_date)),
+            ('user_id', '!=', False),
+        ], ['team_id'], ['team_id'])
+        counts = {datum['team_id'][0]: datum['team_id_count'] for datum in leads_data}
+        for team in self:
+            team.lead_all_assigned_month_count = counts.get(team.id, 0)
+
+    def _compute_opportunities_data(self):
+        """Count open opportunities (probability < 100) and sum their revenue."""
+        opportunity_data = self.env['crm.lead'].read_group([
+            ('team_id', 'in', self.ids),
+            ('probability', '<', 100),
+            ('type', '=', 'opportunity'),
+        ], ['expected_revenue:sum', 'team_id'], ['team_id'])
+        counts = {datum['team_id'][0]: datum['team_id_count'] for datum in opportunity_data}
+        amounts = {datum['team_id'][0]: datum['expected_revenue'] for datum in opportunity_data}
+        for team in self:
+            team.opportunities_count = counts.get(team.id, 0)
+            team.opportunities_amount = amounts.get(team.id, 0)
+
+    def _compute_opportunities_overdue_data(self):
+        """Same as _compute_opportunities_data but restricted to opportunities
+        whose deadline is already past."""
+        # NOTE(review): 'expected_revenue' is requested without the explicit
+        # ':sum' aggregate used in _compute_opportunities_data — presumably
+        # equivalent (sum is the default for numeric fields); aligning the two
+        # would be clearer. TODO confirm.
+        opportunity_data = self.env['crm.lead'].read_group([
+            ('team_id', 'in', self.ids),
+            ('probability', '<', 100),
+            ('type', '=', 'opportunity'),
+            ('date_deadline', '<', fields.Date.to_string(fields.Datetime.now()))
+        ], ['expected_revenue', 'team_id'], ['team_id'])
+        counts = {datum['team_id'][0]: datum['team_id_count'] for datum in opportunity_data}
+        amounts = {datum['team_id'][0]: (datum['expected_revenue']) for datum in opportunity_data}
+        for team in self:
+            team.opportunities_overdue_count = counts.get(team.id, 0)
+            team.opportunities_overdue_amount = amounts.get(team.id, 0)
+
+    @api.onchange('use_leads', 'use_opportunities')
+    def _onchange_use_leads_opportunities(self):
+        # No leads and no pipeline: the team has no use for a mail alias.
+        if not self.use_leads and not self.use_opportunities:
+            self.alias_name = False
+
+    # ------------------------------------------------------------
+    # ORM
+    # ------------------------------------------------------------
+
+    def write(self, vals):
+        """Resynchronize each team's mail alias when lead/opportunity usage changes."""
+        result = super(Team, self).write(vals)
+        if 'use_leads' in vals or 'use_opportunities' in vals:
+            for team in self:
+                alias_vals = team._alias_get_creation_values()
+                team.write({
+                    'alias_name': alias_vals.get('alias_name', team.alias_name),
+                    'alias_defaults': alias_vals.get('alias_defaults'),
+                })
+        return result
+
+    # ------------------------------------------------------------
+    # MESSAGING
+    # ------------------------------------------------------------
+
+    def _alias_get_creation_values(self):
+        """Mail sent to the team alias creates crm.lead records; default the
+        record type to 'lead' or 'opportunity' based on team/user settings."""
+        values = super(Team, self)._alias_get_creation_values()
+        values['alias_model_id'] = self.env['ir.model']._get('crm.lead').id
+        if self.id:
+            if not self.use_leads and not self.use_opportunities:
+                values['alias_name'] = False
+            values['alias_defaults'] = defaults = ast.literal_eval(self.alias_defaults or "{}")
+            has_group_use_lead = self.env.user.has_group('crm.group_use_lead')
+            defaults['type'] = 'lead' if has_group_use_lead and self.use_leads else 'opportunity'
+            defaults['team_id'] = self.id
+        return values
+
+    # ------------------------------------------------------------
+    # ACTIONS
+    # ------------------------------------------------------------
+
+    #TODO JEM : refactor this stuff with xml action, proper customization,
+    @api.model
+    def action_your_pipeline(self):
+        """Open the pipeline of the current user's team, falling back to the
+        first readable team when the user belongs to none."""
+        action = self.env["ir.actions.actions"]._for_xml_id("crm.crm_lead_action_pipeline")
+        user_team_id = self.env.user.sale_team_id.id
+        if user_team_id:
+            # To ensure that the team is readable in multi company
+            user_team_id = self.search([('id', '=', user_team_id)], limit=1).id
+        else:
+            user_team_id = self.search([], limit=1).id
+        action['help'] = _("""<p class='o_view_nocontent_smiling_face'>Add new opportunities</p><p>
+    Looks like you are not a member of a Sales Team. You should add yourself
+    as a member of one of the Sales Team.
+</p>""")
+        if user_team_id:
+            action['help'] += _("<p>As you don't belong to any Sales Team, Odoo opens the first one by default.</p>")
+
+        # Evaluate the XML action context ('uid' may appear in it) before
+        # injecting the default team.
+        action_context = safe_eval(action['context'], {'uid': self.env.uid})
+        if user_team_id:
+            action_context['default_team_id'] = user_team_id
+
+        action['context'] = action_context
+        return action
+
+    def _compute_dashboard_button_name(self):
+        """Label the dashboard button 'Pipeline' for teams using opportunities."""
+        super(Team, self)._compute_dashboard_button_name()
+        team_with_pipelines = self.filtered(lambda el: el.use_opportunities)
+        team_with_pipelines.update({'dashboard_button_name': _("Pipeline")})
+
+    def action_primary_channel_button(self):
+        # Teams using opportunities open the opportunity kanban instead of the default.
+        if self.use_opportunities:
+            return self.env["ir.actions.actions"]._for_xml_id("crm.crm_case_form_view_salesteams_opportunity")
+        return super(Team,self).action_primary_channel_button()
+
+    # Dashboard graph: plot new opportunities per creation date for teams
+    # with a pipeline; otherwise defer to the base implementation.
+    def _graph_get_model(self):
+        if self.use_opportunities:
+            return 'crm.lead'
+        return super(Team,self)._graph_get_model()
+
+    def _graph_date_column(self):
+        if self.use_opportunities:
+            return 'create_date'
+        return super(Team,self)._graph_date_column()
+
+    def _graph_y_query(self):
+        if self.use_opportunities:
+            return 'count(*)'
+        return super(Team,self)._graph_y_query()
+
+    def _extra_sql_conditions(self):
+        if self.use_opportunities:
+            return "AND type LIKE 'opportunity'"
+        return super(Team,self)._extra_sql_conditions()
+
+    def _graph_title_and_key(self):
+        if self.use_opportunities:
+            return ['', _('New Opportunities')] # no more title
+        return super(Team,self)._graph_title_and_key()
diff --git a/addons/crm/models/digest.py b/addons/crm/models/digest.py
new file mode 100644
index 00000000..acb529f1
--- /dev/null
+++ b/addons/crm/models/digest.py
@@ -0,0 +1,46 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models, _
+from odoo.exceptions import AccessError
+
+
+class Digest(models.Model):
+    """Add CRM KPIs (new leads/opps, won opportunities) to digest emails."""
+    _inherit = 'digest.digest'
+
+    kpi_crm_lead_created = fields.Boolean('New Leads/Opportunities')
+    kpi_crm_lead_created_value = fields.Integer(compute='_compute_kpi_crm_lead_created_value')
+    kpi_crm_opportunities_won = fields.Boolean('Opportunities Won')
+    kpi_crm_opportunities_won_value = fields.Integer(compute='_compute_kpi_crm_opportunities_won_value')
+
+    def _compute_kpi_crm_lead_created_value(self):
+        """Count leads/opportunities created during the digest period."""
+        # Raising AccessError signals the digest mail to skip this KPI.
+        if not self.env.user.has_group('sales_team.group_sale_salesman'):
+            raise AccessError(_("Do not have access, skip this data for user's digest email"))
+        for record in self:
+            start, end, company = record._get_kpi_compute_parameters()
+            record.kpi_crm_lead_created_value = self.env['crm.lead'].search_count([
+                ('create_date', '>=', start),
+                ('create_date', '<', end),
+                ('company_id', '=', company.id)
+            ])
+
+    def _compute_kpi_crm_opportunities_won_value(self):
+        """Count opportunities won (probability 100) during the digest period."""
+        if not self.env.user.has_group('sales_team.group_sale_salesman'):
+            raise AccessError(_("Do not have access, skip this data for user's digest email"))
+        for record in self:
+            start, end, company = record._get_kpi_compute_parameters()
+            record.kpi_crm_opportunities_won_value = self.env['crm.lead'].search_count([
+                ('type', '=', 'opportunity'),
+                ('probability', '=', '100'),
+                ('date_closed', '>=', start),
+                ('date_closed', '<', end),
+                ('company_id', '=', company.id)
+            ])
+
+    def _compute_kpis_actions(self, company, user):
+        """Map each CRM KPI to the action opened when clicked in the digest."""
+        res = super(Digest, self)._compute_kpis_actions(company, user)
+        res['kpi_crm_lead_created'] = 'crm.crm_lead_action_pipeline&menu_id=%s' % self.env.ref('crm.crm_menu_root').id
+        res['kpi_crm_opportunities_won'] = 'crm.crm_lead_action_pipeline&menu_id=%s' % self.env.ref('crm.crm_menu_root').id
+        if user.has_group('crm.group_use_lead'):
+            # Users working with leads land on the leads list instead.
+            res['kpi_crm_lead_created'] = 'crm.crm_lead_all_leads&menu_id=%s' % self.env.ref('crm.crm_menu_root').id
+        return res
diff --git a/addons/crm/models/ir_config_parameter.py b/addons/crm/models/ir_config_parameter.py
new file mode 100644
index 00000000..af74a83e
--- /dev/null
+++ b/addons/crm/models/ir_config_parameter.py
@@ -0,0 +1,31 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, models
+
+
+class IrConfigParameter(models.Model):
+ _inherit = 'ir.config_parameter'
+
+ def write(self, vals):
+ result = super(IrConfigParameter, self).write(vals)
+ if any(record.key == "crm.pls_fields" for record in self):
+ self.flush()
+ self.env.registry.setup_models(self.env.cr)
+ return result
+
+ @api.model_create_multi
+ def create(self, vals_list):
+ records = super(IrConfigParameter, self).create(vals_list)
+ if any(record.key == "crm.pls_fields" for record in records):
+ self.flush()
+ self.env.registry.setup_models(self.env.cr)
+ return records
+
+ def unlink(self):
+ pls_emptied = any(record.key == "crm.pls_fields" for record in self)
+ result = super(IrConfigParameter, self).unlink()
+ if pls_emptied:
+ self.flush()
+ self.env.registry.setup_models(self.env.cr)
+ return pls_emptied
diff --git a/addons/crm/models/res_config_settings.py b/addons/crm/models/res_config_settings.py
new file mode 100644
index 00000000..66fbc733
--- /dev/null
+++ b/addons/crm/models/res_config_settings.py
@@ -0,0 +1,127 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from datetime import timedelta
+
+from odoo import api, fields, models
+
+
+class ResConfigSettings(models.TransientModel):
+    """CRM settings: lead-generating mail aliases, IAP lead features and
+    the predictive lead scoring (PLS) configuration.
+    """
+    _inherit = 'res.config.settings'
+
+    crm_alias_prefix = fields.Char(
+        'Default Alias Name for Leads',
+        compute="_compute_crm_alias_prefix" , readonly=False, store=True)
+    generate_lead_from_alias = fields.Boolean(
+        'Manual Assignment of Emails', config_parameter='crm.generate_lead_from_alias',
+        compute="_compute_generate_lead_from_alias", readonly=False, store=True)
+    group_use_lead = fields.Boolean(string="Leads", implied_group='crm.group_use_lead')
+    group_use_recurring_revenues = fields.Boolean(string="Recurring Revenues", implied_group='crm.group_use_recurring_revenues')
+    module_crm_iap_lead = fields.Boolean("Generate new leads based on their country, industries, size, etc.")
+    module_crm_iap_lead_website = fields.Boolean("Create Leads/Opportunities from your website's traffic")
+    module_crm_iap_lead_enrich = fields.Boolean("Enrich your leads automatically with company data based on their email address.")
+    module_mail_client_extension = fields.Boolean("See and manage users, companies, and leads from our mail client extensions.")
+    lead_enrich_auto = fields.Selection([
+        ('manual', 'Enrich leads on demand only'),
+        ('auto', 'Enrich all leads automatically'),
+    ], string='Enrich lead automatically', default='manual', config_parameter='crm.iap.lead.enrich.setting')
+    lead_mining_in_pipeline = fields.Boolean("Create a lead mining request directly from the opportunity pipeline.", config_parameter='crm.lead_mining_in_pipeline')
+    # PLS settings are persisted as ir.config_parameter strings; the *_str
+    # fields hold the raw parameter while the typed fields expose it to the UI.
+    predictive_lead_scoring_start_date = fields.Date(string='Lead Scoring Starting Date', compute="_compute_pls_start_date", inverse="_inverse_pls_start_date_str")
+    predictive_lead_scoring_start_date_str = fields.Char(string='Lead Scoring Starting Date in String', config_parameter='crm.pls_start_date')
+    predictive_lead_scoring_fields = fields.Many2many('crm.lead.scoring.frequency.field', string='Lead Scoring Frequency Fields', compute="_compute_pls_fields", inverse="_inverse_pls_fields_str")
+    predictive_lead_scoring_fields_str = fields.Char(string='Lead Scoring Frequency Fields in String', config_parameter='crm.pls_fields')
+
+    def _find_default_lead_alias_id(self):
+        """Return the generic lead-catching mail alias, if any.
+
+        Prefer the XML-id alias; otherwise fall back to any alias creating
+        crm.lead records that is not bound to a specific team or thread.
+        """
+        alias = self.env.ref('crm.mail_alias_lead_info', False)
+        if not alias:
+            alias = self.env['mail.alias'].search([
+                ('alias_model_id.model', '=', 'crm.lead'),
+                ('alias_force_thread_id', '=', False),
+                ('alias_parent_model_id.model', '=', 'crm.team'),
+                ('alias_parent_thread_id', '=', False),
+                ('alias_defaults', '=', '{}')
+            ], limit=1)
+        return alias
+
+    @api.depends('predictive_lead_scoring_fields_str')
+    def _compute_pls_fields(self):
+        """ As config_parameters does not accept m2m field,
+        we get the fields back from the Char config field, to ease the configuration in config panel """
+        for setting in self:
+            if setting.predictive_lead_scoring_fields_str:
+                names = setting.predictive_lead_scoring_fields_str.split(',')
+                fields = self.env['ir.model.fields'].search([('name', 'in', names), ('model', '=', 'crm.lead')])
+                setting.predictive_lead_scoring_fields = self.env['crm.lead.scoring.frequency.field'].search([('field_id', 'in', fields.ids)])
+            else:
+                setting.predictive_lead_scoring_fields = None
+
+    def _inverse_pls_fields_str(self):
+        """ As config_parameters does not accept m2m field,
+        we store the fields with a comma separated string into a Char config field """
+        for setting in self:
+            if setting.predictive_lead_scoring_fields:
+                setting.predictive_lead_scoring_fields_str = ','.join(setting.predictive_lead_scoring_fields.mapped('field_id.name'))
+            else:
+                setting.predictive_lead_scoring_fields_str = ''
+
+    @api.depends('predictive_lead_scoring_start_date_str')
+    def _compute_pls_start_date(self):
+        """ As config_parameters does not accept Date field,
+        we get the date back from the Char config field, to ease the configuration in config panel """
+        for setting in self:
+            lead_scoring_start_date = setting.predictive_lead_scoring_start_date_str
+            # if config param is deleted / empty, set the date 8 days prior to current date
+            if not lead_scoring_start_date:
+                setting.predictive_lead_scoring_start_date = fields.Date.to_date(fields.Date.today() - timedelta(days=8))
+            else:
+                try:
+                    setting.predictive_lead_scoring_start_date = fields.Date.to_date(lead_scoring_start_date)
+                except ValueError:
+                    # the config parameter is malformed, so set the date 8 days prior to current date
+                    setting.predictive_lead_scoring_start_date = fields.Date.to_date(fields.Date.today() - timedelta(days=8))
+
+    def _inverse_pls_start_date_str(self):
+        """ As config_parameters does not accept Date field,
+        we store the date formatted string into a Char config field """
+        for setting in self:
+            if setting.predictive_lead_scoring_start_date:
+                setting.predictive_lead_scoring_start_date_str = fields.Date.to_string(setting.predictive_lead_scoring_start_date)
+
+    @api.depends('group_use_lead')
+    def _compute_generate_lead_from_alias(self):
+        """ Reset alias / leads configuration if leads are not used """
+        for setting in self.filtered(lambda r: not r.group_use_lead):
+            setting.generate_lead_from_alias = False
+
+    @api.depends('generate_lead_from_alias')
+    def _compute_crm_alias_prefix(self):
+        """Default the alias to 'contact' when enabled; clear it when disabled."""
+        for setting in self:
+            setting.crm_alias_prefix = (setting.crm_alias_prefix or 'contact') if setting.generate_lead_from_alias else False
+
+    @api.model
+    def get_values(self):
+        """Also expose the current generic lead alias name in the settings."""
+        res = super(ResConfigSettings, self).get_values()
+        alias = self._find_default_lead_alias_id()
+        res.update(
+            crm_alias_prefix=alias.alias_name if alias else False,
+        )
+        return res
+
+    def set_values(self):
+        """Persist settings, then propagate the alias configuration."""
+        super(ResConfigSettings, self).set_values()
+        alias = self._find_default_lead_alias_id()
+        if alias:
+            alias.write({'alias_name': self.crm_alias_prefix})
+        else:
+            self.env['mail.alias'].create({
+                'alias_name': self.crm_alias_prefix,
+                'alias_model_id': self.env['ir.model']._get('crm.lead').id,
+                'alias_parent_model_id': self.env['ir.model']._get('crm.team').id,
+            })
+        # Re-sync every team alias with the possibly-changed lead settings.
+        for team in self.env['crm.team'].search([]):
+            team.alias_id.write(team._alias_get_creation_values())
+
+    # ACTIONS
+    def action_reset_lead_probabilities(self):
+        """Admin-only: recompute automated probabilities on all leads."""
+        if self.env.user._is_admin():
+            self.env['crm.lead'].sudo()._cron_update_automated_probabilities()
diff --git a/addons/crm/models/res_partner.py b/addons/crm/models/res_partner.py
new file mode 100644
index 00000000..a040f82e
--- /dev/null
+++ b/addons/crm/models/res_partner.py
@@ -0,0 +1,116 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models
+
+
+class Partner(models.Model):
+    """CRM extension of partners: opportunities, meetings and their counters."""
+    _name = 'res.partner'
+    _inherit = 'res.partner'
+
+    team_id = fields.Many2one('crm.team', string='Sales Team')
+    opportunity_ids = fields.One2many('crm.lead', 'partner_id', string='Opportunities', domain=[('type', '=', 'opportunity')])
+    meeting_ids = fields.Many2many('calendar.event', 'calendar_event_res_partner_rel', 'res_partner_id', 'calendar_event_id', string='Meetings', copy=False)
+    opportunity_count = fields.Integer("Opportunity", compute='_compute_opportunity_count')
+    meeting_count = fields.Integer("# Meetings", compute='_compute_meeting_count')
+
+    @api.model
+    def default_get(self, fields):
+        """Prefill partner contact details from the single active crm.lead
+        the creation was triggered from (if any).
+
+        NOTE: the 'fields' argument shadows the odoo.fields import within
+        this method body.
+        """
+        rec = super(Partner, self).default_get(fields)
+        active_model = self.env.context.get('active_model')
+        if active_model == 'crm.lead' and len(self.env.context.get('active_ids', [])) <= 1:
+            # .exists() guards against a stale active_id left in the context.
+            lead = self.env[active_model].browse(self.env.context.get('active_id')).exists()
+            if lead:
+                rec.update(
+                    phone=lead.phone,
+                    mobile=lead.mobile,
+                    function=lead.function,
+                    title=lead.title.id,
+                    website=lead.website,
+                    street=lead.street,
+                    street2=lead.street2,
+                    city=lead.city,
+                    state_id=lead.state_id.id,
+                    country_id=lead.country_id.id,
+                    zip=lead.zip,
+                )
+        return rec
+
+    def _compute_opportunity_count(self):
+        """Count each partner's leads, children's leads included: every lead
+        is added to the counter of all its ancestors present in self."""
+        # retrieve all children partners and prefetch 'parent_id' on them
+        all_partners = self.with_context(active_test=False).search([('id', 'child_of', self.ids)])
+        all_partners.read(['parent_id'])
+
+        opportunity_data = self.env['crm.lead'].read_group(
+            domain=[('partner_id', 'in', all_partners.ids)],
+            fields=['partner_id'], groupby=['partner_id']
+        )
+
+        self.opportunity_count = 0
+        for group in opportunity_data:
+            partner = self.browse(group['partner_id'][0])
+            # Walk up the parent chain, crediting every ancestor in self.
+            while partner:
+                if partner in self:
+                    partner.opportunity_count += group['partner_id_count']
+                partner = partner.parent_id
+
+    def _compute_meeting_count(self):
+        """Number of calendar events for the partner (children included)."""
+        result = self._compute_meeting()
+        for p in self:
+            p.meeting_count = len(result.get(p.id, []))
+
+    def _compute_meeting(self):
+        """Return {partner_id: [event ids]} for partners in self, including
+        events of child partners; events are filtered through calendar.event
+        record rules via the search() below."""
+        if self.ids:
+            all_partners = self.with_context(active_test=False).search([('id', 'child_of', self.ids)])
+            self.env.cr.execute("""
+                SELECT res_partner_id, calendar_event_id, count(1)
+                  FROM calendar_event_res_partner_rel
+                 WHERE res_partner_id IN %s
+              GROUP BY res_partner_id, calendar_event_id
+            """, [tuple(all_partners.ids)])
+            meeting_data = self.env.cr.fetchall()
+
+            # Keep only valid meeting data based on record rules of events
+            events = [row[1] for row in meeting_data]
+            events = self.env['calendar.event'].search([('id', 'in', events)]).ids
+            meeting_data = [m for m in meeting_data if m[1] in events]
+
+            # Create a dict {partner_id: event_ids} and fill with events linked to the partner
+            meetings = {p.id: set() for p in all_partners}
+            for m in meeting_data:
+                meetings[m[0]].add(m[1])
+
+            # Add the events linked to the children of the partner
+            all_partners.read(['parent_id'])
+            for p in all_partners:
+                partner = p
+                while partner:
+                    if partner in self:
+                        meetings[partner.id] |= meetings[p.id]
+                    partner = partner.parent_id
+            return {p.id: list(meetings[p.id]) for p in self if p.id}
+        return {}
+
+
+    def schedule_meeting(self):
+        """Open the calendar to schedule a meeting with this partner; the
+        current user's partner is pre-added as attendee."""
+        self.ensure_one()
+        partner_ids = self.ids
+        partner_ids.append(self.env.user.partner_id.id)
+        action = self.env["ir.actions.actions"]._for_xml_id("calendar.action_calendar_event")
+        action['context'] = {
+            'default_partner_ids': partner_ids,
+            'default_attendee_ids': [(0, 0, {'partner_id': pid}) for pid in partner_ids],
+        }
+        action['domain'] = ['|', ('id', 'in', self._compute_meeting()[self.id]), ('partner_ids', 'in', self.ids)]
+        return action
+
+    def action_view_opportunity(self):
+        '''
+        This function returns an action that displays the opportunities from partner.
+        '''
+        action = self.env['ir.actions.act_window']._for_xml_id('crm.crm_lead_opportunities')
+        if self.is_company:
+            # Companies see all opportunities of their commercial entity.
+            action['domain'] = [('partner_id.commercial_partner_id.id', '=', self.id)]
+        else:
+            action['domain'] = [('partner_id.id', '=', self.id)]
+        return action
diff --git a/addons/crm/models/res_users.py b/addons/crm/models/res_users.py
new file mode 100644
index 00000000..951287d5
--- /dev/null
+++ b/addons/crm/models/res_users.py
@@ -0,0 +1,11 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import fields, models
+
+
+class Users(models.Model):
+    """Add per-user CRM targets."""
+    _inherit = 'res.users'
+
+    # Personal targets — presumably shown on sales dashboards; TODO confirm
+    # where these are consumed (not visible from this file).
+    target_sales_won = fields.Integer('Won in Opportunities Target')
+    target_sales_done = fields.Integer('Activities Done Target')
diff --git a/addons/crm/models/utm.py b/addons/crm/models/utm.py
new file mode 100644
index 00000000..b1dcb5ed
--- /dev/null
+++ b/addons/crm/models/utm.py
@@ -0,0 +1,30 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import fields, models, api, SUPERUSER_ID
+
+class UtmCampaign(models.Model):
+ _inherit = 'utm.campaign'
+
+ use_leads = fields.Boolean('Use Leads', compute='_compute_use_leads')
+ crm_lead_count = fields.Integer('Leads/Opportunities count', groups='sales_team.group_sale_salesman', compute="_compute_crm_lead_count")
+
+ def _compute_use_leads(self):
+ for campaign in self:
+ campaign.use_leads = self.env.user.has_group('crm.group_use_lead')
+
+ def _compute_crm_lead_count(self):
+ lead_data = self.env['crm.lead'].with_context(active_test=False).read_group([
+ ('campaign_id', 'in', self.ids)],
+ ['campaign_id'], ['campaign_id'])
+ mapped_data = {datum['campaign_id'][0]: datum['campaign_id_count'] for datum in lead_data}
+ for campaign in self:
+ campaign.crm_lead_count = mapped_data.get(campaign.id, 0)
+
+ def action_redirect_to_leads_opportunities(self):
+ view = 'crm.crm_lead_all_leads' if self.use_leads else 'crm.crm_lead_opportunities'
+ action = self.env['ir.actions.act_window']._for_xml_id(view)
+ action['view_mode'] = 'tree,kanban,graph,pivot,form,calendar'
+ action['domain'] = [('campaign_id', 'in', self.ids)]
+ action['context'] = {'active_test': False, 'create': False}
+ return action