summaryrefslogtreecommitdiff
path: root/addons/mrp/models
diff options
context:
space:
mode:
authorstephanchrst <stephanchrst@gmail.com>2022-05-10 21:51:50 +0700
committerstephanchrst <stephanchrst@gmail.com>2022-05-10 21:51:50 +0700
commit3751379f1e9a4c215fb6eb898b4ccc67659b9ace (patch)
treea44932296ef4a9b71d5f010906253d8c53727726 /addons/mrp/models
parent0a15094050bfde69a06d6eff798e9a8ddf2b8c21 (diff)
initial commit 2
Diffstat (limited to 'addons/mrp/models')
-rw-r--r--addons/mrp/models/__init__.py21
-rw-r--r--addons/mrp/models/mrp_bom.py520
-rw-r--r--addons/mrp/models/mrp_document.py24
-rw-r--r--addons/mrp/models/mrp_production.py1801
-rw-r--r--addons/mrp/models/mrp_routing.py69
-rw-r--r--addons/mrp/models/mrp_unbuild.py300
-rw-r--r--addons/mrp/models/mrp_workcenter.py346
-rw-r--r--addons/mrp/models/mrp_workorder.py816
-rw-r--r--addons/mrp/models/product.py209
-rw-r--r--addons/mrp/models/res_company.py38
-rw-r--r--addons/mrp/models/res_config_settings.py39
-rw-r--r--addons/mrp/models/stock_move.py433
-rw-r--r--addons/mrp/models/stock_orderpoint.py90
-rw-r--r--addons/mrp/models/stock_picking.py72
-rw-r--r--addons/mrp/models/stock_production_lot.py17
-rw-r--r--addons/mrp/models/stock_rule.py211
-rw-r--r--addons/mrp/models/stock_scrap.py36
-rw-r--r--addons/mrp/models/stock_traceability.py34
-rw-r--r--addons/mrp/models/stock_warehouse.py308
19 files changed, 5384 insertions, 0 deletions
diff --git a/addons/mrp/models/__init__.py b/addons/mrp/models/__init__.py
new file mode 100644
index 00000000..c6b32ff8
--- /dev/null
+++ b/addons/mrp/models/__init__.py
@@ -0,0 +1,21 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from . import mrp_document
+from . import res_config_settings
+from . import mrp_bom
+from . import mrp_routing
+from . import mrp_workcenter
+from . import mrp_production
+from . import stock_traceability
+from . import mrp_unbuild
+from . import mrp_workorder
+from . import product
+from . import res_company
+from . import stock_move
+from . import stock_orderpoint
+from . import stock_picking
+from . import stock_production_lot
+from . import stock_rule
+from . import stock_scrap
+from . import stock_warehouse
diff --git a/addons/mrp/models/mrp_bom.py b/addons/mrp/models/mrp_bom.py
new file mode 100644
index 00000000..0475d415
--- /dev/null
+++ b/addons/mrp/models/mrp_bom.py
@@ -0,0 +1,520 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models, _
+from odoo.exceptions import UserError, ValidationError
+from odoo.tools import float_round
+
+from itertools import groupby
+from collections import defaultdict
+
+
class MrpBom(models.Model):
    """ Defines bills of material for a product or a product template """
    _name = 'mrp.bom'
    _description = 'Bill of Material'
    _inherit = ['mail.thread']  # chatter: message/follower tracking on BoMs
    _rec_name = 'product_tmpl_id'  # records display via their product template
    _order = "sequence"
    _check_company_auto = True  # auto-verify company consistency on relational fields
+
+ def _get_default_product_uom_id(self):
+ return self.env['uom.uom'].search([], limit=1, order='id').id
+
    code = fields.Char('Reference')
    active = fields.Boolean(
        'Active', default=True,
        help="If the active field is set to False, it will allow you to hide the bills of material without removing it.")
    # 'normal' produces via a manufacturing order; 'phantom' (kit) explodes
    # the components directly into the parent document.
    type = fields.Selection([
        ('normal', 'Manufacture this product'),
        ('phantom', 'Kit')], 'BoM Type',
        default='normal', required=True)
    product_tmpl_id = fields.Many2one(
        'product.template', 'Product',
        check_company=True, index=True,
        domain="[('type', 'in', ['product', 'consu']), '|', ('company_id', '=', False), ('company_id', '=', company_id)]", required=True)
    # Optional: restrict the BoM to a single variant of the template.
    product_id = fields.Many2one(
        'product.product', 'Product Variant',
        check_company=True, index=True,
        domain="['&', ('product_tmpl_id', '=', product_tmpl_id), ('type', 'in', ['product', 'consu']), '|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        help="If a product variant is defined the BOM is available only for this product.")
    bom_line_ids = fields.One2many('mrp.bom.line', 'bom_id', 'BoM Lines', copy=True)
    byproduct_ids = fields.One2many('mrp.bom.byproduct', 'bom_id', 'By-products', copy=True)
    # Quantity of finished product this BoM yields, in product_uom_id.
    product_qty = fields.Float(
        'Quantity', default=1.0,
        digits='Unit of Measure', required=True)
    product_uom_id = fields.Many2one(
        'uom.uom', 'Unit of Measure',
        default=_get_default_product_uom_id, required=True,
        help="Unit of Measure (Unit of Measure) is the unit of measurement for the inventory control", domain="[('category_id', '=', product_uom_category_id)]")
    product_uom_category_id = fields.Many2one(related='product_tmpl_id.uom_id.category_id')
    sequence = fields.Integer('Sequence', help="Gives the sequence order when displaying a list of bills of material.")
    operation_ids = fields.One2many('mrp.routing.workcenter', 'bom_id', 'Operations', copy=True)
    ready_to_produce = fields.Selection([
        ('all_available', ' When all components are available'),
        ('asap', 'When components for 1st operation are available')], string='Manufacturing Readiness',
        default='asap', help="Defines when a Manufacturing Order is considered as ready to be started", required=True)
    picking_type_id = fields.Many2one(
        'stock.picking.type', 'Operation Type', domain="[('code', '=', 'mrp_operation'), ('company_id', '=', company_id)]",
        check_company=True,
        help=u"When a procurement has a ‘produce’ route with a operation type set, it will try to create "
             "a Manufacturing Order for that product using a BoM of the same operation type. That allows "
             "to define stock rules which trigger different manufacturing orders with different BoMs.")
    company_id = fields.Many2one(
        'res.company', 'Company', index=True,
        default=lambda self: self.env.company)
    consumption = fields.Selection([
        ('flexible', 'Allowed'),
        ('warning', 'Allowed with warning'),
        ('strict', 'Blocked')],
        help="Defines if you can consume more or less components than the quantity defined on the BoM:\n"
             " * Allowed: allowed for all manufacturing users.\n"
             " * Allowed with warning: allowed for all manufacturing users with summary of consumption differences when closing the manufacturing order.\n"
             " * Blocked: only a manager can close a manufacturing order when the BoM consumption is not respected.",
        default='warning',
        string='Flexible Consumption',
        required=True
    )

    # Database-level guard: a BoM must produce a strictly positive quantity.
    _sql_constraints = [
        ('qty_positive', 'check (product_qty > 0)', 'The quantity to produce must be positive!'),
    ]
+
+ @api.onchange('product_id')
+ def onchange_product_id(self):
+ if self.product_id:
+ for line in self.bom_line_ids:
+ line.bom_product_template_attribute_value_ids = False
+
+ @api.constrains('product_id', 'product_tmpl_id', 'bom_line_ids')
+ def _check_bom_lines(self):
+ for bom in self:
+ for bom_line in bom.bom_line_ids:
+ if bom.product_id:
+ same_product = bom.product_id == bom_line.product_id
+ else:
+ same_product = bom.product_tmpl_id == bom_line.product_id.product_tmpl_id
+ if same_product:
+ raise ValidationError(_("BoM line product %s should not be the same as BoM product.") % bom.display_name)
+ if bom.product_id and bom_line.bom_product_template_attribute_value_ids:
+ raise ValidationError(_("BoM cannot concern product %s and have a line with attributes (%s) at the same time.")
+ % (bom.product_id.display_name, ", ".join([ptav.display_name for ptav in bom_line.bom_product_template_attribute_value_ids])))
+ for ptav in bom_line.bom_product_template_attribute_value_ids:
+ if ptav.product_tmpl_id != bom.product_tmpl_id:
+ raise ValidationError(_(
+ "The attribute value %(attribute)s set on product %(product)s does not match the BoM product %(bom_product)s.",
+ attribute=ptav.display_name,
+ product=ptav.product_tmpl_id.display_name,
+ bom_product=bom_line.parent_product_tmpl_id.display_name
+ ))
+
+ @api.onchange('product_uom_id')
+ def onchange_product_uom_id(self):
+ res = {}
+ if not self.product_uom_id or not self.product_tmpl_id:
+ return
+ if self.product_uom_id.category_id.id != self.product_tmpl_id.uom_id.category_id.id:
+ self.product_uom_id = self.product_tmpl_id.uom_id.id
+ res['warning'] = {'title': _('Warning'), 'message': _('The Product Unit of Measure you chose has a different category than in the product form.')}
+ return res
+
+ @api.onchange('product_tmpl_id')
+ def onchange_product_tmpl_id(self):
+ if self.product_tmpl_id:
+ self.product_uom_id = self.product_tmpl_id.uom_id.id
+ if self.product_id.product_tmpl_id != self.product_tmpl_id:
+ self.product_id = False
+ for line in self.bom_line_ids:
+ line.bom_product_template_attribute_value_ids = False
+
+ def copy(self, default=None):
+ res = super().copy(default)
+ for bom_line in res.bom_line_ids:
+ if bom_line.operation_id:
+ operation = res.operation_ids.filtered(lambda op: op.name == bom_line.operation_id.name and op.workcenter_id == bom_line.operation_id.workcenter_id)
+ bom_line.operation_id = operation
+ return res
+
+ @api.model
+ def name_create(self, name):
+ # prevent to use string as product_tmpl_id
+ if isinstance(name, str):
+ raise UserError(_("You cannot create a new Bill of Material from here."))
+ return super(MrpBom, self).name_create(name)
+
+ def name_get(self):
+ return [(bom.id, '%s%s' % (bom.code and '%s: ' % bom.code or '', bom.product_tmpl_id.display_name)) for bom in self]
+
+ @api.constrains('product_tmpl_id', 'product_id', 'type')
+ def check_kit_has_not_orderpoint(self):
+ product_ids = [pid for bom in self.filtered(lambda bom: bom.type == "phantom")
+ for pid in (bom.product_id.ids or bom.product_tmpl_id.product_variant_ids.ids)]
+ if self.env['stock.warehouse.orderpoint'].search([('product_id', 'in', product_ids)], count=True):
+ raise ValidationError(_("You can not create a kit-type bill of materials for products that have at least one reordering rule."))
+
+ def unlink(self):
+ if self.env['mrp.production'].search([('bom_id', 'in', self.ids), ('state', 'not in', ['done', 'cancel'])], limit=1):
+ raise UserError(_('You can not delete a Bill of Material with running manufacturing orders.\nPlease close or cancel it first.'))
+ return super(MrpBom, self).unlink()
+
+ @api.model
+ def _bom_find_domain(self, product_tmpl=None, product=None, picking_type=None, company_id=False, bom_type=False):
+ if product:
+ if not product_tmpl:
+ product_tmpl = product.product_tmpl_id
+ domain = ['|', ('product_id', '=', product.id), '&', ('product_id', '=', False), ('product_tmpl_id', '=', product_tmpl.id)]
+ elif product_tmpl:
+ domain = [('product_tmpl_id', '=', product_tmpl.id)]
+ else:
+ # neither product nor template, makes no sense to search
+ raise UserError(_('You should provide either a product or a product template to search a BoM'))
+ if picking_type:
+ domain += ['|', ('picking_type_id', '=', picking_type.id), ('picking_type_id', '=', False)]
+ if company_id or self.env.context.get('company_id'):
+ domain = domain + ['|', ('company_id', '=', False), ('company_id', '=', company_id or self.env.context.get('company_id'))]
+ if bom_type:
+ domain += [('type', '=', bom_type)]
+ # order to prioritize bom with product_id over the one without
+ return domain
+
+ @api.model
+ def _bom_find(self, product_tmpl=None, product=None, picking_type=None, company_id=False, bom_type=False):
+ """ Finds BoM for particular product, picking and company """
+ if product and product.type == 'service' or product_tmpl and product_tmpl.type == 'service':
+ return self.env['mrp.bom']
+ domain = self._bom_find_domain(product_tmpl=product_tmpl, product=product, picking_type=picking_type, company_id=company_id, bom_type=bom_type)
+ if domain is False:
+ return self.env['mrp.bom']
+ return self.search(domain, order='sequence, product_id', limit=1)
+
    @api.model
    def _get_product2bom(self, products, bom_type=False, picking_type=False, company_id=False):
        """Optimized variant of _bom_find to work with recordset"""
        # Maps each product to its first matching BoM (ordered by sequence,
        # variant-specific before template-level). The defaultdict returns an
        # empty mrp.bom recordset for products without a BoM.
        bom_by_product = defaultdict(lambda: self.env['mrp.bom'])
        products = products.filtered(lambda p: p.type != 'service')
        if not products:
            return bom_by_product
        product_templates = products.mapped('product_tmpl_id')
        domain = ['|', ('product_id', 'in', products.ids), '&', ('product_id', '=', False), ('product_tmpl_id', 'in', product_templates.ids)]
        if picking_type:
            domain += ['|', ('picking_type_id', '=', picking_type.id), ('picking_type_id', '=', False)]
        if company_id or self.env.context.get('company_id'):
            domain = domain + ['|', ('company_id', '=', False), ('company_id', '=', company_id or self.env.context.get('company_id'))]
        if bom_type:
            domain += [('type', '=', bom_type)]

        if len(products) == 1:
            # Single product: let the database pick the best match directly.
            bom = self.search(domain, order='sequence, product_id', limit=1)
            if bom:
                bom_by_product[products] = bom
            return bom_by_product

        boms = self.search(domain, order='sequence, product_id')

        products_ids = set(products.ids)
        for bom in boms:
            # A template-level BoM applies to every variant of its template.
            products_implies = bom.product_id or bom.product_tmpl_id.product_variant_ids
            for product in products_implies:
                # Keep only the first (highest-priority) BoM per product; the
                # `in` test on the defaultdict does not insert a default entry.
                if product.id in products_ids and product not in bom_by_product:
                    bom_by_product[product] = bom
        return bom_by_product
+
+ def explode(self, product, quantity, picking_type=False):
+ """
+ Explodes the BoM and creates two lists with all the information you need: bom_done and line_done
+ Quantity describes the number of times you need the BoM: so the quantity divided by the number created by the BoM
+ and converted into its UoM
+ """
+ from collections import defaultdict
+
+ graph = defaultdict(list)
+ V = set()
+
+ def check_cycle(v, visited, recStack, graph):
+ visited[v] = True
+ recStack[v] = True
+ for neighbour in graph[v]:
+ if visited[neighbour] == False:
+ if check_cycle(neighbour, visited, recStack, graph) == True:
+ return True
+ elif recStack[neighbour] == True:
+ return True
+ recStack[v] = False
+ return False
+
+ product_ids = set()
+ product_boms = {}
+ def update_product_boms():
+ products = self.env['product.product'].browse(product_ids)
+ product_boms.update(self._get_product2bom(products, bom_type='phantom',
+ picking_type=picking_type or self.picking_type_id, company_id=self.company_id.id))
+ # Set missing keys to default value
+ for product in products:
+ product_boms.setdefault(product, self.env['mrp.bom'])
+
+ boms_done = [(self, {'qty': quantity, 'product': product, 'original_qty': quantity, 'parent_line': False})]
+ lines_done = []
+ V |= set([product.product_tmpl_id.id])
+
+ bom_lines = []
+ for bom_line in self.bom_line_ids:
+ product_id = bom_line.product_id
+ V |= set([product_id.product_tmpl_id.id])
+ graph[product.product_tmpl_id.id].append(product_id.product_tmpl_id.id)
+ bom_lines.append((bom_line, product, quantity, False))
+ product_ids.add(product_id.id)
+ update_product_boms()
+ product_ids.clear()
+ while bom_lines:
+ current_line, current_product, current_qty, parent_line = bom_lines[0]
+ bom_lines = bom_lines[1:]
+
+ if current_line._skip_bom_line(current_product):
+ continue
+
+ line_quantity = current_qty * current_line.product_qty
+ if not current_line.product_id in product_boms:
+ update_product_boms()
+ product_ids.clear()
+ bom = product_boms.get(current_line.product_id)
+ if bom:
+ converted_line_quantity = current_line.product_uom_id._compute_quantity(line_quantity / bom.product_qty, bom.product_uom_id)
+ bom_lines += [(line, current_line.product_id, converted_line_quantity, current_line) for line in bom.bom_line_ids]
+ for bom_line in bom.bom_line_ids:
+ graph[current_line.product_id.product_tmpl_id.id].append(bom_line.product_id.product_tmpl_id.id)
+ if bom_line.product_id.product_tmpl_id.id in V and check_cycle(bom_line.product_id.product_tmpl_id.id, {key: False for key in V}, {key: False for key in V}, graph):
+ raise UserError(_('Recursion error! A product with a Bill of Material should not have itself in its BoM or child BoMs!'))
+ V |= set([bom_line.product_id.product_tmpl_id.id])
+ if not bom_line.product_id in product_boms:
+ product_ids.add(bom_line.product_id.id)
+ boms_done.append((bom, {'qty': converted_line_quantity, 'product': current_product, 'original_qty': quantity, 'parent_line': current_line}))
+ else:
+ # We round up here because the user expects that if he has to consume a little more, the whole UOM unit
+ # should be consumed.
+ rounding = current_line.product_uom_id.rounding
+ line_quantity = float_round(line_quantity, precision_rounding=rounding, rounding_method='UP')
+ lines_done.append((current_line, {'qty': line_quantity, 'product': current_product, 'original_qty': quantity, 'parent_line': parent_line}))
+
+ return boms_done, lines_done
+
+ @api.model
+ def get_import_templates(self):
+ return [{
+ 'label': _('Import Template for Bills of Materials'),
+ 'template': '/mrp/static/xls/mrp_bom.xls'
+ }]
+
+
class MrpBomLine(models.Model):
    # One component (product + quantity) consumed by a parent mrp.bom.
    _name = 'mrp.bom.line'
    _order = "sequence, id"
    _rec_name = "product_id"
    _description = 'Bill of Material Line'
    _check_company_auto = True
+
+ def _get_default_product_uom_id(self):
+ return self.env['uom.uom'].search([], limit=1, order='id').id
+
    product_id = fields.Many2one('product.product', 'Component', required=True, check_company=True)
    product_tmpl_id = fields.Many2one('product.template', 'Product Template', related='product_id.product_tmpl_id')
    # Company follows the parent BoM's company.
    company_id = fields.Many2one(
        related='bom_id.company_id', store=True, index=True, readonly=True)
    product_qty = fields.Float(
        'Quantity', default=1.0,
        digits='Product Unit of Measure', required=True)
    product_uom_id = fields.Many2one(
        'uom.uom', 'Product Unit of Measure',
        default=_get_default_product_uom_id,
        required=True,
        help="Unit of Measure (Unit of Measure) is the unit of measurement for the inventory control", domain="[('category_id', '=', product_uom_category_id)]")
    product_uom_category_id = fields.Many2one(related='product_id.uom_id.category_id')
    sequence = fields.Integer(
        'Sequence', default=1,
        help="Gives the sequence order when displaying.")
    bom_id = fields.Many2one(
        'mrp.bom', 'Parent BoM',
        index=True, ondelete='cascade', required=True)
    parent_product_tmpl_id = fields.Many2one('product.template', 'Parent Product Template', related='bom_id.product_tmpl_id')
    # Candidate values for "Apply on Variants", restricted by the compute below.
    possible_bom_product_template_attribute_value_ids = fields.Many2many('product.template.attribute.value', compute='_compute_possible_bom_product_template_attribute_value_ids')
    bom_product_template_attribute_value_ids = fields.Many2many(
        'product.template.attribute.value', string="Apply on Variants", ondelete='restrict',
        domain="[('id', 'in', possible_bom_product_template_attribute_value_ids)]",
        help="BOM Product Variants needed to apply this line.")
    # Candidate operations, restricted by _compute_allowed_operation_ids.
    allowed_operation_ids = fields.Many2many('mrp.routing.workcenter', compute='_compute_allowed_operation_ids')
    operation_id = fields.Many2one(
        'mrp.routing.workcenter', 'Consumed in Operation', check_company=True,
        domain="[('id', 'in', allowed_operation_ids)]",
        help="The operation where the components are consumed, or the finished products created.")
    child_bom_id = fields.Many2one(
        'mrp.bom', 'Sub BoM', compute='_compute_child_bom_id')
    child_line_ids = fields.One2many(
        'mrp.bom.line', string="BOM lines of the referred bom",
        compute='_compute_child_line_ids')
    attachments_count = fields.Integer('Attachments Count', compute='_compute_attachments_count')

    _sql_constraints = [
        ('bom_qty_zero', 'CHECK (product_qty>=0)', 'All product quantities must be greater or equal to 0.\n'
            'Lines with 0 quantities can be used as optional lines. \n'
            'You should install the mrp_byproduct module if you want to manage extra products on BoMs !'),
    ]
+
+ @api.depends(
+ 'parent_product_tmpl_id.attribute_line_ids.value_ids',
+ 'parent_product_tmpl_id.attribute_line_ids.attribute_id.create_variant',
+ 'parent_product_tmpl_id.attribute_line_ids.product_template_value_ids.ptav_active',
+ )
+ def _compute_possible_bom_product_template_attribute_value_ids(self):
+ for line in self:
+ line.possible_bom_product_template_attribute_value_ids = line.parent_product_tmpl_id.valid_product_template_attribute_line_ids._without_no_variant_attributes().product_template_value_ids._only_active()
+
+ @api.depends('product_id', 'bom_id')
+ def _compute_child_bom_id(self):
+ for line in self:
+ if not line.product_id:
+ line.child_bom_id = False
+ else:
+ line.child_bom_id = self.env['mrp.bom']._bom_find(
+ product_tmpl=line.product_id.product_tmpl_id,
+ product=line.product_id)
+
+ @api.depends('product_id')
+ def _compute_attachments_count(self):
+ for line in self:
+ nbr_attach = self.env['mrp.document'].search_count([
+ '|',
+ '&', ('res_model', '=', 'product.product'), ('res_id', '=', line.product_id.id),
+ '&', ('res_model', '=', 'product.template'), ('res_id', '=', line.product_id.product_tmpl_id.id)])
+ line.attachments_count = nbr_attach
+
+ @api.depends('child_bom_id')
+ def _compute_child_line_ids(self):
+ """ If the BOM line refers to a BOM, return the ids of the child BOM lines """
+ for line in self:
+ line.child_line_ids = line.child_bom_id.bom_line_ids.ids or False
+
+ @api.depends('bom_id')
+ def _compute_allowed_operation_ids(self):
+ for bom_line in self:
+ if not bom_line.bom_id.operation_ids:
+ bom_line.allowed_operation_ids = self.env['mrp.routing.workcenter']
+ else:
+ operation_domain = [
+ ('id', 'in', bom_line.bom_id.operation_ids.ids),
+ '|',
+ ('company_id', '=', bom_line.company_id.id),
+ ('company_id', '=', False)
+ ]
+ bom_line.allowed_operation_ids = self.env['mrp.routing.workcenter'].search(operation_domain)
+
+ @api.onchange('product_uom_id')
+ def onchange_product_uom_id(self):
+ res = {}
+ if not self.product_uom_id or not self.product_id:
+ return res
+ if self.product_uom_id.category_id != self.product_id.uom_id.category_id:
+ self.product_uom_id = self.product_id.uom_id.id
+ res['warning'] = {'title': _('Warning'), 'message': _('The Product Unit of Measure you chose has a different category than in the product form.')}
+ return res
+
+ @api.onchange('product_id')
+ def onchange_product_id(self):
+ if self.product_id:
+ self.product_uom_id = self.product_id.uom_id.id
+
+ @api.model_create_multi
+ def create(self, vals_list):
+ for values in vals_list:
+ if 'product_id' in values and 'product_uom_id' not in values:
+ values['product_uom_id'] = self.env['product.product'].browse(values['product_id']).uom_id.id
+ return super(MrpBomLine, self).create(vals_list)
+
    def _skip_bom_line(self, product):
        """ Control if a BoM line should be produced, can be inherited to add
        custom control. It currently checks that all variant values are in the
        product.

        If multiple values are encoded for the same attribute line, only one of
        them has to be found on the variant.
        """
        self.ensure_one()
        if product._name == 'product.template':
            # Template-level explosion: variant restrictions cannot apply.
            return False
        if self.bom_product_template_attribute_value_ids:
            # Group the line's attribute values per attribute line; sorting by
            # the same key first is required for itertools.groupby to work.
            for ptal, iter_ptav in groupby(self.bom_product_template_attribute_value_ids.sorted('attribute_line_id'), lambda ptav: ptav.attribute_line_id):
                # Skip the line if none of this attribute's allowed values
                # is present on the variant being produced.
                if not any(ptav in product.product_template_attribute_value_ids for ptav in iter_ptav):
                    return True
        return False
+
    def action_see_attachments(self):
        """Open a kanban view of the documents attached to the component
        product or its template (action dict for the web client)."""
        # Match attachments linked either to the variant or to its template.
        domain = [
            '|',
            '&', ('res_model', '=', 'product.product'), ('res_id', '=', self.product_id.id),
            '&', ('res_model', '=', 'product.template'), ('res_id', '=', self.product_id.product_tmpl_id.id)]
        attachment_view = self.env.ref('mrp.view_document_file_kanban_mrp')
        return {
            'name': _('Attachments'),
            'domain': domain,
            'res_model': 'mrp.document',
            'type': 'ir.actions.act_window',
            'view_id': attachment_view.id,
            'views': [(attachment_view.id, 'kanban'), (False, 'form')],
            'view_mode': 'kanban,tree,form',
            'help': _('''<p class="o_view_nocontent_smiling_face">
                        Upload files to your product
                    </p><p>
                        Use this feature to store any files, like drawings or specifications.
                    </p>'''),
            'limit': 80,
            # New attachments default to the variant and the line's company.
            'context': "{'default_res_model': '%s','default_res_id': %d, 'default_company_id': %s}" % ('product.product', self.product_id.id, self.company_id.id)
        }
+
+
class MrpByProduct(models.Model):
    # Secondary product produced alongside the main product of a BoM.
    _name = 'mrp.bom.byproduct'
    _description = 'Byproduct'
    _rec_name = "product_id"
    _check_company_auto = True

    product_id = fields.Many2one('product.product', 'By-product', required=True, check_company=True)
    # Company follows the parent BoM's company.
    company_id = fields.Many2one(related='bom_id.company_id', store=True, index=True, readonly=True)
    product_qty = fields.Float(
        'Quantity',
        default=1.0, digits='Product Unit of Measure', required=True)
    product_uom_id = fields.Many2one('uom.uom', 'Unit of Measure', required=True)
    bom_id = fields.Many2one('mrp.bom', 'BoM', ondelete='cascade')
    # Candidate operations, restricted by _compute_allowed_operation_ids.
    allowed_operation_ids = fields.Many2many('mrp.routing.workcenter', compute='_compute_allowed_operation_ids')
    operation_id = fields.Many2one(
        'mrp.routing.workcenter', 'Produced in Operation', check_company=True,
        domain="[('id', 'in', allowed_operation_ids)]")
+
+ @api.depends('bom_id')
+ def _compute_allowed_operation_ids(self):
+ for byproduct in self:
+ if not byproduct.bom_id.operation_ids:
+ byproduct.allowed_operation_ids = self.env['mrp.routing.workcenter']
+ else:
+ operation_domain = [
+ ('id', 'in', byproduct.bom_id.operation_ids.ids),
+ '|',
+ ('company_id', '=', byproduct.company_id.id),
+ ('company_id', '=', False)
+ ]
+ byproduct.allowed_operation_ids = self.env['mrp.routing.workcenter'].search(operation_domain)
+
+ @api.onchange('product_id')
+ def onchange_product_id(self):
+ """ Changes UoM if product_id changes. """
+ if self.product_id:
+ self.product_uom_id = self.product_id.uom_id.id
+
+ @api.onchange('product_uom_id')
+ def onchange_uom(self):
+ res = {}
+ if self.product_uom_id and self.product_id and self.product_uom_id.category_id != self.product_id.uom_id.category_id:
+ res['warning'] = {
+ 'title': _('Warning'),
+ 'message': _('The unit of measure you choose is in a different category than the product unit of measure.')
+ }
+ self.product_uom_id = self.product_id.uom_id.id
+ return res
diff --git a/addons/mrp/models/mrp_document.py b/addons/mrp/models/mrp_document.py
new file mode 100644
index 00000000..73009000
--- /dev/null
+++ b/addons/mrp/models/mrp_document.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import fields, models
+
+
class MrpDocument(models.Model):
    """ Extension of ir.attachment only used in MRP to handle archivage
    and basic versioning.
    """
    _name = 'mrp.document'
    _description = "Production Document"
    # Delegation inheritance: every mrp.document wraps an ir.attachment
    # record reachable through ir_attachment_id.
    _inherits = {
        'ir.attachment': 'ir_attachment_id',
    }
    _order = "priority desc, id desc"

    # Underlying attachment; deleting it cascades to the document.
    ir_attachment_id = fields.Many2one('ir.attachment', string='Related attachment', required=True, ondelete='cascade')
    active = fields.Boolean('Active', default=True)
    priority = fields.Selection([
        ('0', 'Normal'),
        ('1', 'Low'),
        ('2', 'High'),
        ('3', 'Very High')], string="Priority", help='Gives the sequence order when displaying a list of MRP documents.')
diff --git a/addons/mrp/models/mrp_production.py b/addons/mrp/models/mrp_production.py
new file mode 100644
index 00000000..2ed032a2
--- /dev/null
+++ b/addons/mrp/models/mrp_production.py
@@ -0,0 +1,1801 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+import json
+import datetime
+import math
+import operator as py_operator
+import re
+
+from collections import defaultdict
+from dateutil.relativedelta import relativedelta
+from itertools import groupby
+
+from odoo import api, fields, models, _
+from odoo.exceptions import AccessError, UserError
+from odoo.tools import float_compare, float_round, float_is_zero, format_datetime
+from odoo.tools.misc import format_date
+
+from odoo.addons.stock.models.stock_move import PROCUREMENT_PRIORITIES
+
+SIZE_BACK_ORDER_NUMERING = 3
+
+
+class MrpProduction(models.Model):
+ """ Manufacturing Orders """
+ _name = 'mrp.production'
+ _description = 'Production Order'
+ _date_name = 'date_planned_start'
+ _inherit = ['mail.thread', 'mail.activity.mixin']
+ _order = 'priority desc, date_planned_start asc,id'
+
+ @api.model
+ def _get_default_picking_type(self):
+ company_id = self.env.context.get('default_company_id', self.env.company.id)
+ return self.env['stock.picking.type'].search([
+ ('code', '=', 'mrp_operation'),
+ ('warehouse_id.company_id', '=', company_id),
+ ], limit=1).id
+
+ @api.model
+ def _get_default_location_src_id(self):
+ location = False
+ company_id = self.env.context.get('default_company_id', self.env.company.id)
+ if self.env.context.get('default_picking_type_id'):
+ location = self.env['stock.picking.type'].browse(self.env.context['default_picking_type_id']).default_location_src_id
+ if not location:
+ location = self.env['stock.warehouse'].search([('company_id', '=', company_id)], limit=1).lot_stock_id
+ return location and location.id or False
+
+ @api.model
+ def _get_default_location_dest_id(self):
+ location = False
+ company_id = self.env.context.get('default_company_id', self.env.company.id)
+ if self._context.get('default_picking_type_id'):
+ location = self.env['stock.picking.type'].browse(self.env.context['default_picking_type_id']).default_location_dest_id
+ if not location:
+ location = self.env['stock.warehouse'].search([('company_id', '=', company_id)], limit=1).lot_stock_id
+ return location and location.id or False
+
+ @api.model
+ def _get_default_date_planned_finished(self):
+ if self.env.context.get('default_date_planned_start'):
+ return fields.Datetime.to_datetime(self.env.context.get('default_date_planned_start')) + datetime.timedelta(hours=1)
+ return datetime.datetime.now() + datetime.timedelta(hours=1)
+
+ @api.model
+ def _get_default_date_planned_start(self):
+ if self.env.context.get('default_date_deadline'):
+ return fields.Datetime.to_datetime(self.env.context.get('default_date_deadline'))
+ return datetime.datetime.now()
+
    @api.model
    def _get_default_is_locked(self):
        # New MOs start locked when the user belongs to the
        # "locked by default" group.
        return self.user_has_groups('mrp.group_locked_by_default')
+
    # ------------------------------------------------------------------
    # Identification / ordering
    # ------------------------------------------------------------------
    name = fields.Char(
        'Reference', copy=False, readonly=True, default=lambda x: _('New'))
    priority = fields.Selection(
        PROCUREMENT_PRIORITIES, string='Priority', default='0', index=True,
        help="Components will be reserved first for the MO with the highest priorities.")
    backorder_sequence = fields.Integer("Backorder Sequence", default=0, copy=False, help="Backorder sequence, if equals to 0 means there is not related backorder")
    origin = fields.Char(
        'Source', copy=False,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]},
        help="Reference of the document that generated this production order request.")

    # ------------------------------------------------------------------
    # Product to manufacture, quantities, UoM
    # ------------------------------------------------------------------
    product_id = fields.Many2one(
        'product.product', 'Product',
        domain="""[
        ('type', 'in', ['product', 'consu']),
        '|',
            ('company_id', '=', False),
            ('company_id', '=', company_id)
        ]
        """,
        readonly=True, required=True, check_company=True,
        states={'draft': [('readonly', False)]})
    product_tracking = fields.Selection(related='product_id.tracking')
    allowed_product_ids = fields.Many2many('product.product', compute='_compute_allowed_product_ids')
    product_tmpl_id = fields.Many2one('product.template', 'Product Template', related='product_id.product_tmpl_id')
    product_qty = fields.Float(
        'Quantity To Produce',
        default=1.0, digits='Product Unit of Measure',
        readonly=True, required=True, tracking=True,
        states={'draft': [('readonly', False)]})
    product_uom_id = fields.Many2one(
        'uom.uom', 'Product Unit of Measure',
        readonly=True, required=True,
        states={'draft': [('readonly', False)]}, domain="[('category_id', '=', product_uom_category_id)]")
    lot_producing_id = fields.Many2one(
        'stock.production.lot', string='Lot/Serial Number', copy=False,
        domain="[('product_id', '=', product_id), ('company_id', '=', company_id)]", check_company=True)
    qty_producing = fields.Float(string="Quantity Producing", digits='Product Unit of Measure', copy=False)
    product_uom_category_id = fields.Many2one(related='product_id.uom_id.category_id')
    # product_qty expressed in the product's own UoM (see compute).
    product_uom_qty = fields.Float(string='Total Quantity', compute='_compute_product_uom_qty', store=True)
    picking_type_id = fields.Many2one(
        'stock.picking.type', 'Operation Type',
        domain="[('code', '=', 'mrp_operation'), ('company_id', '=', company_id)]",
        default=_get_default_picking_type, required=True, check_company=True)
    use_create_components_lots = fields.Boolean(related='picking_type_id.use_create_components_lots')

    # ------------------------------------------------------------------
    # Locations and scheduling
    # ------------------------------------------------------------------
    location_src_id = fields.Many2one(
        'stock.location', 'Components Location',
        default=_get_default_location_src_id,
        readonly=True, required=True,
        domain="[('usage','=','internal'), '|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        states={'draft': [('readonly', False)]}, check_company=True,
        help="Location where the system will look for components.")
    location_dest_id = fields.Many2one(
        'stock.location', 'Finished Products Location',
        default=_get_default_location_dest_id,
        readonly=True, required=True,
        domain="[('usage','=','internal'), '|', ('company_id', '=', False), ('company_id', '=', company_id)]",
        states={'draft': [('readonly', False)]}, check_company=True,
        help="Location where the system will stock the finished products.")
    date_planned_start = fields.Datetime(
        'Scheduled Date', copy=False, default=_get_default_date_planned_start,
        help="Date at which you plan to start the production.",
        index=True, required=True)
    date_planned_finished = fields.Datetime(
        'Scheduled End Date',
        default=_get_default_date_planned_finished,
        help="Date at which you plan to finish the production.",
        copy=False)
    date_deadline = fields.Datetime(
        'Deadline', copy=False, store=True, readonly=True, compute='_compute_date_deadline', inverse='_set_date_deadline',
        help="Informative date allowing to define when the manufacturing order should be processed at the latest to fulfill delivery on time.")
    date_start = fields.Datetime('Start Date', copy=False, index=True, readonly=True)
    date_finished = fields.Datetime('End Date', copy=False, index=True, readonly=True)
    bom_id = fields.Many2one(
        'mrp.bom', 'Bill of Material',
        readonly=True, states={'draft': [('readonly', False)]},
        domain="""[
        '&',
            '|',
                ('company_id', '=', False),
                ('company_id', '=', company_id),
            '&',
                '|',
                    ('product_id','=',product_id),
                    '&',
                        ('product_tmpl_id.product_variant_ids','=',product_id),
                        ('product_id','=',False),
        ('type', '=', 'normal')]""",
        check_company=True,
        help="Bill of Materials allow you to define the list of required components to make a finished product.")

    # ------------------------------------------------------------------
    # Lifecycle / reservation states (both computed together)
    # ------------------------------------------------------------------
    state = fields.Selection([
        ('draft', 'Draft'),
        ('confirmed', 'Confirmed'),
        ('progress', 'In Progress'),
        ('to_close', 'To Close'),
        ('done', 'Done'),
        ('cancel', 'Cancelled')], string='State',
        compute='_compute_state', copy=False, index=True, readonly=True,
        store=True, tracking=True,
        help=" * Draft: The MO is not confirmed yet.\n"
             " * Confirmed: The MO is confirmed, the stock rules and the reordering of the components are trigerred.\n"
             " * In Progress: The production has started (on the MO or on the WO).\n"
             " * To Close: The production is done, the MO has to be closed.\n"
             " * Done: The MO is closed, the stock moves are posted. \n"
             " * Cancelled: The MO has been cancelled, can't be confirmed anymore.")
    reservation_state = fields.Selection([
        ('confirmed', 'Waiting'),
        ('assigned', 'Ready'),
        ('waiting', 'Waiting Another Operation')],
        string='Material Availability',
        compute='_compute_state', copy=False, index=True, readonly=True,
        store=True, tracking=True,
        help=" * Ready: The material is available to start the production.\n\
            * Waiting: The material is not available to start the production.\n\
            The material availability is impacted by the manufacturing readiness\
            defined on the BoM.")

    # ------------------------------------------------------------------
    # Stock moves and work orders
    # ------------------------------------------------------------------
    move_raw_ids = fields.One2many(
        'stock.move', 'raw_material_production_id', 'Components',
        copy=False, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]},
        domain=[('scrapped', '=', False)])
    move_finished_ids = fields.One2many(
        'stock.move', 'production_id', 'Finished Products',
        copy=False, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]},
        domain=[('scrapped', '=', False)])
    # Finished moves that are not the main product (see compute/inverse).
    move_byproduct_ids = fields.One2many('stock.move', compute='_compute_move_byproduct_ids', inverse='_set_move_byproduct_ids')
    finished_move_line_ids = fields.One2many(
        'stock.move.line', compute='_compute_lines', inverse='_inverse_lines', string="Finished Product"
    )
    workorder_ids = fields.One2many(
        'mrp.workorder', 'production_id', 'Work Orders', copy=True)
    workorder_done_count = fields.Integer('# Done Work Orders', compute='_compute_workorder_done_count')
    move_dest_ids = fields.One2many('stock.move', 'created_production_id',
                                    string="Stock Movements of Produced Goods")

    unreserve_visible = fields.Boolean(
        'Allowed to Unreserve Production', compute='_compute_unreserve_visible',
        help='Technical field to check when we can unreserve')
    reserve_visible = fields.Boolean(
        'Allowed to Reserve Production', compute='_compute_unreserve_visible',
        help='Technical field to check when we can reserve quantities')
    user_id = fields.Many2one(
        'res.users', 'Responsible', default=lambda self: self.env.user,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]},
        domain=lambda self: [('groups_id', 'in', self.env.ref('mrp.group_mrp_user').id)])
    company_id = fields.Many2one(
        'res.company', 'Company', default=lambda self: self.env.company,
        index=True, required=True)

    # ------------------------------------------------------------------
    # Procurement / misc technical fields
    # ------------------------------------------------------------------
    qty_produced = fields.Float(compute="_get_produced_qty", string="Quantity Produced")
    procurement_group_id = fields.Many2one(
        'procurement.group', 'Procurement Group',
        copy=False)
    product_description_variants = fields.Char('Custom Description')
    orderpoint_id = fields.Many2one('stock.warehouse.orderpoint', 'Orderpoint')
    propagate_cancel = fields.Boolean(
        'Propagate cancel and split',
        help='If checked, when the previous move of the move (which was generated by a next procurement) is cancelled or split, the move generated by this move will too')
    delay_alert_date = fields.Datetime('Delay Alert Date', compute='_compute_delay_alert_date', search='_search_delay_alert_date')
    json_popover = fields.Char('JSON data for the popover widget', compute='_compute_json_popover')
    scrap_ids = fields.One2many('stock.scrap', 'production_id', 'Scraps')
    scrap_count = fields.Integer(compute='_compute_scrap_move_count', string='Scrap Move')
    is_locked = fields.Boolean('Is Locked', default=_get_default_is_locked, copy=False)
    is_planned = fields.Boolean('Its Operations are Planned', compute='_compute_is_planned', search='_search_is_planned')

    show_final_lots = fields.Boolean('Show Final Lots', compute='_compute_show_lots')
    production_location_id = fields.Many2one('stock.location', "Production Location", compute="_compute_production_location", store=True)
    picking_ids = fields.Many2many('stock.picking', compute='_compute_picking_ids', string='Picking associated to this manufacturing order')
    delivery_count = fields.Integer(string='Delivery Orders', compute='_compute_picking_ids')
    confirm_cancel = fields.Boolean(compute='_compute_confirm_cancel')
    consumption = fields.Selection([
        ('flexible', 'Allowed'),
        ('warning', 'Allowed with warning'),
        ('strict', 'Blocked')],
        required=True,
        readonly=True,
        default='flexible',
    )

    mrp_production_child_count = fields.Integer("Number of generated MO", compute='_compute_mrp_production_child_count')
    mrp_production_source_count = fields.Integer("Number of source MO", compute='_compute_mrp_production_source_count')
    mrp_production_backorder_count = fields.Integer("Count of linked backorder", compute='_compute_mrp_production_backorder')
    show_lock = fields.Boolean('Show Lock/unlock buttons', compute='_compute_show_lock')
    components_availability = fields.Char(
        string="Component Availability", compute='_compute_components_availability')
    components_availability_state = fields.Selection([
        ('available', 'Available'),
        ('expected', 'Expected'),
        ('late', 'Late')], compute='_compute_components_availability')
    show_lot_ids = fields.Boolean('Display the serial number shortcut on the moves', compute='_compute_show_lot_ids')
+
+ @api.depends('product_id', 'bom_id', 'company_id')
+ def _compute_allowed_product_ids(self):
+ for production in self:
+ product_domain = [
+ ('type', 'in', ['product', 'consu']),
+ '|',
+ ('company_id', '=', False),
+ ('company_id', '=', production.company_id.id)
+ ]
+ if production.bom_id:
+ if production.bom_id.product_id:
+ product_domain += [('id', '=', production.bom_id.product_id.id)]
+ else:
+ product_domain += [('id', 'in', production.bom_id.product_tmpl_id.product_variant_ids.ids)]
+ production.allowed_product_ids = self.env['product.product'].search(product_domain)
+
+ @api.depends('procurement_group_id.stock_move_ids.created_production_id.procurement_group_id.mrp_production_ids')
+ def _compute_mrp_production_child_count(self):
+ for production in self:
+ production.mrp_production_child_count = len(production.procurement_group_id.stock_move_ids.created_production_id.procurement_group_id.mrp_production_ids - production)
+
+ @api.depends('move_dest_ids.group_id.mrp_production_ids')
+ def _compute_mrp_production_source_count(self):
+ for production in self:
+ production.mrp_production_source_count = len(production.procurement_group_id.mrp_production_ids.move_dest_ids.group_id.mrp_production_ids - production)
+
+ @api.depends('procurement_group_id.mrp_production_ids')
+ def _compute_mrp_production_backorder(self):
+ for production in self:
+ production.mrp_production_backorder_count = len(production.procurement_group_id.mrp_production_ids)
+
+ @api.depends('move_raw_ids', 'state', 'date_planned_start', 'move_raw_ids.forecast_availability', 'move_raw_ids.forecast_expected_date')
+ def _compute_components_availability(self):
+ self.components_availability = False
+ self.components_availability_state = 'available'
+ productions = self.filtered(lambda mo: mo.state not in ['cancel', 'draft', 'done'])
+ productions.components_availability = _('Available')
+ for production in productions:
+ forecast_date = max(production.move_raw_ids.filtered('forecast_expected_date').mapped('forecast_expected_date'), default=False)
+ if any(float_compare(move.forecast_availability, move.product_qty, move.product_id.uom_id.rounding) == -1 for move in production.move_raw_ids):
+ production.components_availability = _('Not Available')
+ production.components_availability_state = 'late'
+ elif forecast_date:
+ production.components_availability = _('Exp %s', format_date(self.env, forecast_date))
+ production.components_availability_state = 'late' if forecast_date > production.date_planned_start else 'expected'
+
+ @api.depends('move_finished_ids.date_deadline')
+ def _compute_date_deadline(self):
+ for production in self:
+ production.date_deadline = min(production.move_finished_ids.filtered('date_deadline').mapped('date_deadline'), default=production.date_deadline or False)
+
    def _set_date_deadline(self):
        # Inverse of _compute_date_deadline: push the MO deadline back onto
        # all its finished moves.
        for production in self:
            production.move_finished_ids.date_deadline = production.date_deadline
+
+ @api.depends("workorder_ids.date_planned_start", "workorder_ids.date_planned_finished")
+ def _compute_is_planned(self):
+ for production in self:
+ if production.workorder_ids:
+ production.is_planned = any(wo.date_planned_start and wo.date_planned_finished for wo in production.workorder_ids if wo.state != 'done')
+ else:
+ production.is_planned = False
+
+ def _search_is_planned(self, operator, value):
+ if operator not in ('=', '!='):
+ raise UserError(_('Invalid domain operator %s', operator))
+
+ if value not in (False, True):
+ raise UserError(_('Invalid domain right operand %s', value))
+ ops = {'=': py_operator.eq, '!=': py_operator.ne}
+ ids = []
+ for mo in self.search([]):
+ if ops[operator](value, mo.is_planned):
+ ids.append(mo.id)
+
+ return [('id', 'in', ids)]
+
    @api.depends('move_raw_ids.delay_alert_date')
    def _compute_delay_alert_date(self):
        """Latest delay alert date among the component moves of each MO.

        A single ``read_group`` aggregates the max per production instead of
        iterating the moves of every record.
        """
        delay_alert_date_data = self.env['stock.move'].read_group([('id', 'in', self.move_raw_ids.ids), ('delay_alert_date', '!=', False)], ['delay_alert_date:max'], 'raw_material_production_id')
        # read_group returns (id, display_name) pairs for the groupby field.
        delay_alert_date_data = {data['raw_material_production_id'][0]: data['delay_alert_date'] for data in delay_alert_date_data}
        for production in self:
            production.delay_alert_date = delay_alert_date_data.get(production.id, False)
+
    def _compute_json_popover(self):
        """Build the JSON payload consumed by the stock rescheduling popover
        widget; the key names are part of the widget's contract."""
        for production in self:
            production.json_popover = json.dumps({
                'popoverTemplate': 'stock.PopoverStockRescheduling',
                'delay_alert_date': format_datetime(self.env, production.delay_alert_date, dt_format=False) if production.delay_alert_date else False,
                # Documents responsible for the delay, gathered from the
                # origin moves of every late component move.
                'late_elements': [{
                    'id': late_document.id,
                    'name': late_document.display_name,
                    'model': late_document._name,
                } for late_document in production.move_raw_ids.filtered(lambda m: m.delay_alert_date).move_orig_ids._delay_alert_get_documents()
                ]
            })
+
    @api.depends('move_raw_ids.state', 'move_finished_ids.state')
    def _compute_confirm_cancel(self):
        """ If the manufacturing order contains some done move (via an intermediate
        post inventory), the user has to confirm the cancellation.
        """
        # One read_group over both finished and raw moves in 'done' state.
        domain = [
            ('state', '=', 'done'),
            '|',
                ('production_id', 'in', self.ids),
                ('raw_material_production_id', 'in', self.ids)
        ]
        res = self.env['stock.move'].read_group(domain, ['state', 'production_id', 'raw_material_production_id'], ['production_id', 'raw_material_production_id'], lazy=False)
        productions_with_done_move = {}
        for rec in res:
            # Either link identifies the production owning the done move.
            production_record = rec['production_id'] or rec['raw_material_production_id']
            if production_record:
                productions_with_done_move[production_record[0]] = True
        for production in self:
            production.confirm_cancel = productions_with_done_move.get(production.id, False)
+
+ @api.depends('procurement_group_id')
+ def _compute_picking_ids(self):
+ for order in self:
+ order.picking_ids = self.env['stock.picking'].search([
+ ('group_id', '=', order.procurement_group_id.id), ('group_id', '!=', False),
+ ])
+ order.delivery_count = len(order.picking_ids)
+
    def action_view_mo_delivery(self):
        """ This function returns an action that display picking related to
        manufacturing order orders. It can either be a in a list or in a form
        view, if there is only one picking to show.
        """
        self.ensure_one()
        action = self.env["ir.actions.actions"]._for_xml_id("stock.action_picking_tree_all")
        pickings = self.mapped('picking_ids')
        if len(pickings) > 1:
            # Several pickings: restrict the list view to them.
            action['domain'] = [('id', 'in', pickings.ids)]
        elif pickings:
            # Single picking: open it directly in form view, keeping any
            # non-form views the action already declares.
            form_view = [(self.env.ref('stock.view_picking_form').id, 'form')]
            if 'views' in action:
                action['views'] = form_view + [(state, view) for state, view in action['views'] if view != 'form']
            else:
                action['views'] = form_view
            action['res_id'] = pickings.id
        # Propagate the MO reference and forbid creation from this view.
        action['context'] = dict(self._context, default_origin=self.name, create=False)
        return action
+
+ @api.depends('product_uom_id', 'product_qty', 'product_id.uom_id')
+ def _compute_product_uom_qty(self):
+ for production in self:
+ if production.product_id.uom_id != production.product_uom_id:
+ production.product_uom_qty = production.product_uom_id._compute_quantity(production.product_qty, production.product_id.uom_id)
+ else:
+ production.product_uom_qty = production.product_qty
+
    @api.depends('product_id', 'company_id')
    def _compute_production_location(self):
        """Virtual production location: the product's own production location
        property when a product is set, else the company's production location."""
        if not self.company_id:
            return
        # One production location per company, fetched in a single read_group.
        location_by_company = self.env['stock.location'].read_group([
            ('company_id', 'in', self.company_id.ids),
            ('usage', '=', 'production')
        ], ['company_id', 'ids:array_agg(id)'], ['company_id'])
        location_by_company = {lbc['company_id'][0]: lbc['ids'] for lbc in location_by_company}
        for production in self:
            if production.product_id:
                production.production_location_id = production.product_id.with_company(production.company_id).property_stock_production
            else:
                # NOTE(review): raises if the company has no production
                # location (.get() returns None, then [0] fails) — presumably
                # guaranteed by stock's company setup; verify.
                production.production_location_id = location_by_company.get(production.company_id.id)[0]
+
    @api.depends('product_id.tracking')
    def _compute_show_lots(self):
        # The final lot/serial widget is only shown for tracked products.
        for production in self:
            production.show_final_lots = production.product_id.tracking != 'none'
+
    def _inverse_lines(self):
        """ Little hack to make sure that when you change something on these objects, it gets saved"""
        # Intentionally a no-op: declaring an inverse makes the computed
        # one2many editable; the edited move lines persist through the ORM.
        pass
+
+ @api.depends('move_finished_ids.move_line_ids')
+ def _compute_lines(self):
+ for production in self:
+ production.finished_move_line_ids = production.move_finished_ids.mapped('move_line_ids')
+
    @api.depends('workorder_ids.state')
    def _compute_workorder_done_count(self):
        """Count done work orders per MO via a single read_group."""
        data = self.env['mrp.workorder'].read_group([
            ('production_id', 'in', self.ids),
            ('state', '=', 'done')], ['production_id'], ['production_id'])
        count_data = dict((item['production_id'][0], item['production_id_count']) for item in data)
        for production in self:
            production.workorder_done_count = count_data.get(production.id, 0)
+
    @api.depends(
        'move_raw_ids.state', 'move_raw_ids.quantity_done', 'move_finished_ids.state',
        'workorder_ids', 'workorder_ids.state', 'product_qty', 'qty_producing')
    def _compute_state(self):
        """ Compute the production state. It use the same process than stock
        picking. It exists 3 extra steps for production:
        - progress: At least one item is produced or consumed.
        - to_close: The quantity produced is greater than the quantity to
        produce and all work orders has been finished.
        """
        # TODO: duplicated code with stock_picking.py
        # NOTE: branch order matters — the first matching condition wins.
        for production in self:
            if not production.move_raw_ids:
                production.state = 'draft'
            elif all(move.state == 'draft' for move in production.move_raw_ids):
                production.state = 'draft'
            elif all(move.state == 'cancel' for move in production.move_raw_ids):
                production.state = 'cancel'
            elif all(move.state in ('cancel', 'done') for move in production.move_raw_ids):
                production.state = 'done'
            elif production.workorder_ids and all(wo_state in ('done', 'cancel') for wo_state in production.workorder_ids.mapped('state')):
                production.state = 'to_close'
            elif not production.workorder_ids and production.qty_producing >= production.product_qty:
                production.state = 'to_close'
            elif any(wo_state in ('progress', 'done') for wo_state in production.workorder_ids.mapped('state')):
                production.state = 'progress'
            elif not float_is_zero(production.qty_producing, precision_rounding=production.product_uom_id.rounding):
                production.state = 'progress'
            elif any(not float_is_zero(move.quantity_done, precision_rounding=move.product_uom.rounding or move.product_id.uom_id.rounding) for move in production.move_raw_ids):
                production.state = 'progress'
            else:
                production.state = 'confirmed'

            # Compute reservation state
            # State where the reservation does not matter.
            production.reservation_state = False
            # Compute reservation state according to its component's moves.
            if production.state not in ('draft', 'done', 'cancel'):
                relevant_move_state = production.move_raw_ids._get_relevant_state_among_moves()
                if relevant_move_state == 'partially_available':
                    # With operations and an 'asap' BoM readiness, a partial
                    # reservation may already be enough to start producing.
                    if production.bom_id.operation_ids and production.bom_id.ready_to_produce == 'asap':
                        production.reservation_state = production._get_ready_to_produce_state()
                    else:
                        production.reservation_state = 'confirmed'
                elif relevant_move_state != 'draft':
                    production.reservation_state = relevant_move_state
+
+ @api.depends('move_raw_ids', 'state', 'move_raw_ids.product_uom_qty')
+ def _compute_unreserve_visible(self):
+ for order in self:
+ already_reserved = order.state not in ('done', 'cancel') and order.mapped('move_raw_ids.move_line_ids')
+ any_quantity_done = any(m.quantity_done > 0 for m in order.move_raw_ids)
+
+ order.unreserve_visible = not any_quantity_done and already_reserved
+ order.reserve_visible = order.state in ('confirmed', 'progress', 'to_close') and any(move.product_uom_qty and move.state in ['confirmed', 'partially_available'] for move in order.move_raw_ids)
+
+ @api.depends('workorder_ids.state', 'move_finished_ids', 'move_finished_ids.quantity_done')
+ def _get_produced_qty(self):
+ for production in self:
+ done_moves = production.move_finished_ids.filtered(lambda x: x.state != 'cancel' and x.product_id.id == production.product_id.id)
+ qty_produced = sum(done_moves.mapped('quantity_done'))
+ production.qty_produced = qty_produced
+ return True
+
    def _compute_scrap_move_count(self):
        """Count scrap orders per MO via a single read_group."""
        data = self.env['stock.scrap'].read_group([('production_id', 'in', self.ids)], ['production_id'], ['production_id'])
        count_data = dict((item['production_id'][0], item['production_id_count']) for item in data)
        for production in self:
            production.scrap_count = count_data.get(production.id, 0)
+
+ @api.depends('move_finished_ids')
+ def _compute_move_byproduct_ids(self):
+ for order in self:
+ order.move_byproduct_ids = order.move_finished_ids.filtered(lambda m: m.product_id != order.product_id)
+
+ def _set_move_byproduct_ids(self):
+ move_finished_ids = self.move_finished_ids.filtered(lambda m: m.product_id == self.product_id)
+ self.move_finished_ids = move_finished_ids | self.move_byproduct_ids
+
+ @api.depends('state')
+ def _compute_show_lock(self):
+ for order in self:
+ order.show_lock = self.env.user.has_group('mrp.group_locked_by_default') and order.id is not False and order.state not in {'cancel', 'draft'}
+
+ @api.depends('state','move_raw_ids')
+ def _compute_show_lot_ids(self):
+ for order in self:
+ order.show_lot_ids = order.state != 'draft' and any(m.product_id.tracking == 'serial' for m in order.move_raw_ids)
+
    # DB-level guards: MO reference unique per company; quantity to produce
    # strictly positive.
    _sql_constraints = [
        ('name_uniq', 'unique(name, company_id)', 'Reference must be unique per Company!'),
        ('qty_positive', 'check (product_qty > 0)', 'The quantity to produce must be positive!'),
    ]
+
+ @api.model
+ def _search_delay_alert_date(self, operator, value):
+ late_stock_moves = self.env['stock.move'].search([('delay_alert_date', operator, value)])
+ return ['|', ('move_raw_ids', 'in', late_stock_moves.ids), ('move_finished_ids', 'in', late_stock_moves.ids)]
+
    @api.onchange('company_id')
    def onchange_company_id(self):
        """Keep raw moves and the operation type consistent with the company."""
        if self.company_id:
            if self.move_raw_ids:
                self.move_raw_ids.update({'company_id': self.company_id})
            # Replace a picking type from another company by one of the
            # newly selected company.
            if self.picking_type_id and self.picking_type_id.company_id != self.company_id:
                self.picking_type_id = self.env['stock.picking.type'].search([
                    ('code', '=', 'mrp_operation'),
                    ('warehouse_id.company_id', '=', self.company_id.id),
                ], limit=1).id
+
    @api.onchange('product_id', 'picking_type_id', 'company_id')
    def onchange_product_id(self):
        """ Finds UoM of changed product. """
        if not self.product_id:
            self.bom_id = False
        # Refresh the BoM when none is set or the current one no longer
        # matches the selected product/variant.
        elif not self.bom_id or self.bom_id.product_tmpl_id != self.product_tmpl_id or (self.bom_id.product_id and self.bom_id.product_id != self.product_id):
            bom = self.env['mrp.bom']._bom_find(product=self.product_id, picking_type=self.picking_type_id, company_id=self.company_id.id, bom_type='normal')
            if bom:
                self.bom_id = bom.id
                # Quantity and UoM follow the BoM definition.
                self.product_qty = self.bom_id.product_qty
                self.product_uom_id = self.bom_id.product_uom_id.id
            else:
                self.bom_id = False
                self.product_uom_id = self.product_id.uom_id.id
+
    @api.onchange('product_qty', 'product_uom_id')
    def _onchange_product_qty(self):
        """Rescale the expected duration (and planned end date) of each work
        order when the quantity to produce or the UoM changes."""
        for workorder in self.workorder_ids:
            workorder.product_uom_id = self.product_uom_id
            if self._origin.product_qty:
                # Scale the duration by the ratio new qty / stored qty.
                workorder.duration_expected = workorder._get_duration_expected(ratio=self.product_qty / self._origin.product_qty)
            else:
                workorder.duration_expected = workorder._get_duration_expected()
            if workorder.date_planned_start and workorder.duration_expected:
                workorder.date_planned_finished = workorder.date_planned_start + relativedelta(minutes=workorder.duration_expected)
+
    @api.onchange('bom_id')
    def _onchange_bom_id(self):
        """Apply the BoM defaults: product, quantity, UoM, picking type, and
        drop moves that came from the previous BoM."""
        if not self.product_id and self.bom_id:
            self.product_id = self.bom_id.product_id or self.bom_id.product_tmpl_id.product_variant_ids[0]
        self.product_qty = self.bom_id.product_qty or 1.0
        self.product_uom_id = self.bom_id and self.bom_id.product_uom_id.id or self.product_id.uom_id.id
        # (2, id) commands: delete BoM-generated raw moves and all finished moves.
        self.move_raw_ids = [(2, move.id) for move in self.move_raw_ids.filtered(lambda m: m.bom_line_id)]
        self.move_finished_ids = [(2, move.id) for move in self.move_finished_ids]
        self.picking_type_id = self.bom_id.picking_type_id or self.picking_type_id
+
    @api.onchange('date_planned_start', 'product_id')
    def _onchange_date_planned_start(self):
        """Derive the planned end date from the start date plus the product's
        manufacturing delay and the company lead time; sync move dates."""
        if self.date_planned_start and not self.is_planned:
            date_planned_finished = self.date_planned_start + relativedelta(days=self.product_id.produce_delay)
            date_planned_finished = date_planned_finished + relativedelta(days=self.company_id.manufacturing_lead)
            # Never let start and end coincide exactly.
            if date_planned_finished == self.date_planned_start:
                date_planned_finished = date_planned_finished + relativedelta(hours=1)
            self.date_planned_finished = date_planned_finished
            # (1, id, vals) commands: update move dates in place.
            self.move_raw_ids = [(1, m.id, {'date': self.date_planned_start}) for m in self.move_raw_ids]
            self.move_finished_ids = [(1, m.id, {'date': date_planned_finished}) for m in self.move_finished_ids]
+
    @api.onchange('bom_id', 'product_id', 'product_qty', 'product_uom_id')
    def _onchange_move_raw(self):
        """Regenerate the component moves from the BoM, preserving manual
        lines and updating lines that already map to a BoM line."""
        if not self.bom_id and not self._origin.product_id:
            return
        # Clear move raws if we are changing the product. In case of creation (self._origin is empty),
        # we need to avoid keeping incorrect lines, so clearing is necessary too.
        if self.product_id != self._origin.product_id:
            self.move_raw_ids = [(5,)]
        if self.bom_id and self.product_qty > 0:
            # keep manual entries
            list_move_raw = [(4, move.id) for move in self.move_raw_ids.filtered(lambda m: not m.bom_line_id)]
            moves_raw_values = self._get_moves_raw_values()
            move_raw_dict = {move.bom_line_id.id: move for move in self.move_raw_ids.filtered(lambda m: m.bom_line_id)}
            for move_raw_values in moves_raw_values:
                if move_raw_values['bom_line_id'] in move_raw_dict:
                    # update existing entries
                    list_move_raw += [(1, move_raw_dict[move_raw_values['bom_line_id']].id, move_raw_values)]
                else:
                    # add new entries
                    list_move_raw += [(0, 0, move_raw_values)]
            self.move_raw_ids = list_move_raw
        else:
            # No BoM or zero quantity: drop all BoM-generated lines.
            self.move_raw_ids = [(2, move.id) for move in self.move_raw_ids.filtered(lambda m: m.bom_line_id)]
+
+ @api.onchange('product_id')
+ def _onchange_move_finished_product(self):
+ self.move_finished_ids = [(5,)]
+ if self.product_id:
+ self._create_update_move_finished()
+
    @api.onchange('bom_id', 'product_qty', 'product_uom_id')
    def _onchange_move_finished(self):
        """Refresh the finished moves when BoM, quantity or UoM changes."""
        if self.product_id and self.product_qty > 0:
            self._create_update_move_finished()
        else:
            # NOTE(review): filters finished moves on bom_line_id — presumably
            # the BoM-generated ones; confirm against _create_update_move_finished.
            self.move_finished_ids = [(2, move.id) for move in self.move_finished_ids.filtered(lambda m: m.bom_line_id)]
+
+ @api.onchange('location_src_id', 'move_raw_ids', 'bom_id')
+ def _onchange_location(self):
+ # Propagate the source location (and its warehouse) to every component move.
+ source_location = self.location_src_id
+ self.move_raw_ids.update({
+ 'warehouse_id': source_location.get_warehouse().id,
+ 'location_id': source_location.id,
+ })
+
+ @api.onchange('location_dest_id', 'move_finished_ids', 'bom_id')
+ def _onchange_location_dest(self):
+ # Propagate the destination location (and its warehouse) to every finished
+ # move, via (1, id, vals) update commands so the onchange stays pending.
+ destination_location = self.location_dest_id
+ update_value_list = []
+ for move in self.move_finished_ids:
+ update_value_list += [(1, move.id, ({
+ 'warehouse_id': destination_location.get_warehouse().id,
+ 'location_dest_id': destination_location.id,
+ }))]
+ self.move_finished_ids = update_value_list
+
+ @api.onchange('picking_type_id')
+ def onchange_picking_type(self):
+ # Reset default source/destination locations from the operation type,
+ # falling back to the generic stock location — or, if the current user
+ # cannot read it (or the xmlid is missing), to the first warehouse's stock.
+ location = self.env.ref('stock.stock_location_stock')
+ try:
+ location.check_access_rule('read')
+ except (AttributeError, AccessError):
+ location = self.env['stock.warehouse'].search([('company_id', '=', self.env.company.id)], limit=1).lot_stock_id
+ self.move_raw_ids.update({'picking_type_id': self.picking_type_id})
+ self.move_finished_ids.update({'picking_type_id': self.picking_type_id})
+ self.location_src_id = self.picking_type_id.default_location_src_id.id or location.id
+ self.location_dest_id = self.picking_type_id.default_location_dest_id.id or location.id
+
+ @api.onchange('qty_producing', 'lot_producing_id')
+ def _onchange_producing(self):
+ # Spread the produced quantity over component/by-product move lines.
+ self._set_qty_producing()
+
+ @api.onchange('lot_producing_id')
+ def _onchange_lot_producing(self):
+ # For serial-tracked products, warn (non-blocking) if the chosen serial
+ # number already appears on any non-cancelled move line of this company.
+ if self.product_id.tracking == 'serial':
+ if self.env['stock.move.line'].search_count([
+ ('company_id', '=', self.company_id.id),
+ ('product_id', '=', self.product_id.id),
+ ('lot_id', '=', self.lot_producing_id.id),
+ ('state', '!=', 'cancel')
+ ]):
+ return {
+ 'warning': {
+ 'title': _('Warning'),
+ 'message': _('Existing Serial number (%s). Please correct the serial numbers encoded.') % self.lot_producing_id.name
+ }
+ }
+
+ @api.onchange('bom_id')
+ def _onchange_workorder_ids(self):
+ # Regenerate the work orders from the BoM operations, or drop them all
+ # when the BoM is removed.
+ if self.bom_id:
+ self._create_workorder()
+ else:
+ self.workorder_ids = False
+
+ def write(self, vals):
+ # Override: keep moves/work orders in sync with date changes, replan
+ # planned MOs, and propagate lot/qty edits onto done finished move lines.
+ # NOTE(review): in Odoo, `'field' in recordset` tests field *existence*
+ # (BaseModel.__contains__), so this guard is always True on mrp.production;
+ # confirm `in vals` was not intended. It does guarantee
+ # `production_to_replan` is bound before its use further down.
+ if 'workorder_ids' in self:
+ production_to_replan = self.filtered(lambda p: p.is_planned)
+ res = super(MrpProduction, self).write(vals)
+
+ for production in self:
+ if 'date_planned_start' in vals and not self.env.context.get('force_date', False):
+ if production.state in ['done', 'cancel']:
+ raise UserError(_('You cannot move a manufacturing order once it is cancelled or done.'))
+ if production.is_planned:
+ production.button_unplan()
+ # NOTE(review): uses `self.*` inside a per-record loop; with a
+ # multi-record `self` this raises, and `product_uom_qty` does not
+ # look like an mrp.production field — verify against
+ # `production.product_id` / `production.product_qty`.
+ move_vals = self._get_move_finished_values(self.product_id, self.product_uom_qty, self.product_uom_id)
+ production.move_finished_ids.write({'date': move_vals['date']})
+ if vals.get('date_planned_start'):
+ production.move_raw_ids.write({'date': production.date_planned_start, 'date_deadline': production.date_planned_start})
+ if vals.get('date_planned_finished'):
+ production.move_finished_ids.write({'date': production.date_planned_finished})
+ if any(field in ['move_raw_ids', 'move_finished_ids', 'workorder_ids'] for field in vals) and production.state != 'draft':
+ if production.state == 'done':
+ # for some reason moves added after state = 'done' won't save group_id, reference if added in
+ # "stock_move.default_get()"
+ production.move_raw_ids.filtered(lambda move: move.additional and move.date > production.date_planned_start).write({
+ 'group_id': production.procurement_group_id.id,
+ 'reference': production.name,
+ 'date': production.date_planned_start,
+ 'date_deadline': production.date_planned_start
+ })
+ production.move_finished_ids.filtered(lambda move: move.additional and move.date > production.date_planned_finished).write({
+ 'reference': production.name,
+ 'date': production.date_planned_finished,
+ 'date_deadline': production.date_deadline
+ })
+ production._autoconfirm_production()
+ if production in production_to_replan:
+ production._plan_workorders(replan=True)
+ # Editing lot/qty on a done MO: mirror the change on the done finished lines.
+ if production.state == 'done' and ('lot_producing_id' in vals or 'qty_producing' in vals):
+ finished_move_lines = production.move_finished_ids.filtered(
+ lambda move: move.product_id == self.product_id and move.state == 'done').mapped('move_line_ids')
+ if 'lot_producing_id' in vals:
+ finished_move_lines.write({'lot_id': vals.get('lot_producing_id')})
+ if 'qty_producing' in vals:
+ finished_move_lines.write({'qty_done': vals.get('qty_producing')})
+
+ # Without operations, keep the planned finish at least 1h after the new start.
+ if not production.bom_id.operation_ids and vals.get('date_planned_start') and not vals.get('date_planned_finished'):
+ new_date_planned_start = fields.Datetime.to_datetime(vals.get('date_planned_start'))
+ if not production.date_planned_finished or new_date_planned_start >= production.date_planned_finished:
+ production.date_planned_finished = new_date_planned_start + datetime.timedelta(hours=1)
+ return res
+
+ @api.model
+ def create(self, values):
+ # Override: merge by-product moves into move_finished_ids, assign the MO
+ # name from the picking type sequence, create the procurement group, and
+ # stamp group/origin/dates on all moves.
+ # Remove from `move_finished_ids` the by-product moves and then move `move_byproduct_ids`
+ # into `move_finished_ids` to avoid duplicate and inconsistency.
+ if values.get('move_finished_ids', False):
+ # Assumes (0, 0, vals) commands carrying a 'byproduct_id' key — TODO confirm for other command types.
+ values['move_finished_ids'] = list(filter(lambda move: move[2]['byproduct_id'] is False, values['move_finished_ids']))
+ if values.get('move_byproduct_ids', False):
+ values['move_finished_ids'] = values.get('move_finished_ids', []) + values['move_byproduct_ids']
+ del values['move_byproduct_ids']
+ if not values.get('name', False) or values['name'] == _('New'):
+ picking_type_id = values.get('picking_type_id') or self._get_default_picking_type()
+ picking_type_id = self.env['stock.picking.type'].browse(picking_type_id)
+ if picking_type_id:
+ values['name'] = picking_type_id.sequence_id.next_by_id()
+ else:
+ values['name'] = self.env['ir.sequence'].next_by_code('mrp.production') or _('New')
+ if not values.get('procurement_group_id'):
+ procurement_group_vals = self._prepare_procurement_group_vals(values)
+ values['procurement_group_id'] = self.env["procurement.group"].create(procurement_group_vals).id
+ production = super(MrpProduction, self).create(values)
+ (production.move_raw_ids | production.move_finished_ids).write({
+ 'group_id': production.procurement_group_id.id,
+ 'origin': production.name
+ })
+ production.move_raw_ids.write({'date': production.date_planned_start})
+ production.move_finished_ids.write({'date': production.date_planned_finished})
+ # Trigger move_raw creation when importing a file
+ if 'import_file' in self.env.context:
+ production._onchange_move_raw()
+ production._onchange_move_finished()
+ return production
+
+ def unlink(self):
+ # Override: forbid deleting done MOs, cancel the rest first (deletion is
+ # refused if any MO cannot reach 'cancel'), and drop their unfinished work orders.
+ if any(production.state == 'done' for production in self):
+ raise UserError(_('Cannot delete a manufacturing order in done state.'))
+ self.action_cancel()
+ not_cancel = self.filtered(lambda m: m.state != 'cancel')
+ if not_cancel:
+ productions_name = ', '.join([prod.display_name for prod in not_cancel])
+ raise UserError(_('%s cannot be deleted. Try to cancel them before.', productions_name))
+
+ workorders_to_delete = self.workorder_ids.filtered(lambda wo: wo.state != 'done')
+ if workorders_to_delete:
+ workorders_to_delete.unlink()
+ return super(MrpProduction, self).unlink()
+
+ def copy_data(self, default=None):
+ # Override: copy moves explicitly so duplication keeps only meaningful ones
+ # (non-zero qty; for finished moves also non-cancelled).
+ default = dict(default or {})
+ # covers at least 2 cases: backorders generation (follow default logic for moves copying)
+ # and copying a done MO via the form (i.e. copy only the non-cancelled moves since no backorder = cancelled finished moves)
+ if not default or 'move_finished_ids' not in default:
+ default['move_finished_ids'] = [(0, 0, move.copy_data()[0]) for move in self.move_finished_ids.filtered(lambda m: m.state != 'cancel' and m.product_qty != 0.0)]
+ if not default or 'move_raw_ids' not in default:
+ default['move_raw_ids'] = [(0, 0, move.copy_data()[0]) for move in self.move_raw_ids.filtered(lambda m: m.product_qty != 0.0)]
+ return super(MrpProduction, self).copy_data(default=default)
+
+ def action_toggle_is_locked(self):
+ # Toggle the lock flag of a single MO (Lock/Unlock button on the form).
+ self.ensure_one()
+ self.is_locked = not self.is_locked
+ return True
+
+ def _create_workorder(self):
+ # (Re)create work orders from the exploded BoM operations: the existing
+ # ones are cleared (command (5, 0)) and one work order is created per
+ # operation, then expected durations are computed.
+ for production in self:
+ if not production.bom_id:
+ continue
+ workorders_values = []
+
+ product_qty = production.product_uom_id._compute_quantity(production.product_qty, production.bom_id.product_uom_id)
+ exploded_boms, dummy = production.bom_id.explode(production.product_id, product_qty / production.bom_id.product_qty, picking_type=production.bom_id.picking_type_id)
+
+ for bom, bom_data in exploded_boms:
+ # If the operations of the parent BoM and phantom BoM are the same, don't recreate work orders.
+ if not (bom.operation_ids and (not bom_data['parent_line'] or bom_data['parent_line'].bom_id.operation_ids != bom.operation_ids)):
+ continue
+ for operation in bom.operation_ids:
+ workorders_values += [{
+ 'name': operation.name,
+ 'production_id': production.id,
+ 'workcenter_id': operation.workcenter_id.id,
+ 'product_uom_id': production.product_uom_id.id,
+ 'operation_id': operation.id,
+ 'state': 'pending',
+ 'consumption': production.consumption,
+ }]
+ production.workorder_ids = [(5, 0)] + [(0, 0, value) for value in workorders_values]
+ for workorder in production.workorder_ids:
+ workorder.duration_expected = workorder._get_duration_expected()
+
+ def _get_move_finished_values(self, product_id, product_uom_qty, product_uom, operation_id=False, byproduct_id=False):
+ group_orders = self.procurement_group_id.mrp_production_ids
+ move_dest_ids = self.move_dest_ids
+ if len(group_orders) > 1:
+ move_dest_ids |= group_orders[0].move_finished_ids.filtered(lambda m: m.product_id == self.product_id).move_dest_ids
+ date_planned_finished = self.date_planned_start + relativedelta(days=self.product_id.produce_delay)
+ date_planned_finished = date_planned_finished + relativedelta(days=self.company_id.manufacturing_lead)
+ if date_planned_finished == self.date_planned_start:
+ date_planned_finished = date_planned_finished + relativedelta(hours=1)
+ return {
+ 'product_id': product_id,
+ 'product_uom_qty': product_uom_qty,
+ 'product_uom': product_uom,
+ 'operation_id': operation_id,
+ 'byproduct_id': byproduct_id,
+ 'name': self.name,
+ 'date': date_planned_finished,
+ 'date_deadline': self.date_deadline,
+ 'picking_type_id': self.picking_type_id.id,
+ 'location_id': self.product_id.with_company(self.company_id).property_stock_production.id,
+ 'location_dest_id': self.location_dest_id.id,
+ 'company_id': self.company_id.id,
+ 'production_id': self.id,
+ 'warehouse_id': self.location_dest_id.get_warehouse().id,
+ 'origin': self.name,
+ 'group_id': self.procurement_group_id.id,
+ 'propagate_cancel': self.propagate_cancel,
+ 'move_dest_ids': [(4, x.id) for x in self.move_dest_ids if not byproduct_id],
+ }
+
+ def _get_moves_finished_values(self):
+ # Collect finished-move values for each MO: one for the main product plus
+ # one per BoM by-product (by-product qty scaled by the produced quantity).
+ moves = []
+ for production in self:
+ if production.product_id in production.bom_id.byproduct_ids.mapped('product_id'):
+ # NOTE(review): message uses `self.product_id` while the check is
+ # per-production — verify on multi-record calls.
+ raise UserError(_("You cannot have %s as the finished product and in the Byproducts", self.product_id.name))
+ moves.append(production._get_move_finished_values(production.product_id.id, production.product_qty, production.product_uom_id.id))
+ for byproduct in production.bom_id.byproduct_ids:
+ product_uom_factor = production.product_uom_id._compute_quantity(production.product_qty, production.bom_id.product_uom_id)
+ qty = byproduct.product_qty * (product_uom_factor / production.bom_id.product_qty)
+ moves.append(production._get_move_finished_values(
+ byproduct.product_id.id, qty, byproduct.product_uom_id.id,
+ byproduct.operation_id.id, byproduct.id))
+ return moves
+
+ def _create_update_move_finished(self):
+ """ This is a helper function to support complexity of onchange logic for MOs.
+ It is important that the special *2Many commands used here remain as long as function
+ is used within onchanges.
+ """
+ # keep manual entries
+ list_move_finished = [(4, move.id) for move in self.move_finished_ids.filtered(
+ lambda m: not m.byproduct_id and m.product_id != self.product_id)]
+ list_move_finished = []
+ moves_finished_values = self._get_moves_finished_values()
+ moves_byproduct_dict = {move.byproduct_id.id: move for move in self.move_finished_ids.filtered(lambda m: m.byproduct_id)}
+ move_finished = self.move_finished_ids.filtered(lambda m: m.product_id == self.product_id)
+ for move_finished_values in moves_finished_values:
+ if move_finished_values.get('byproduct_id') in moves_byproduct_dict:
+ # update existing entries
+ list_move_finished += [(1, moves_byproduct_dict[move_finished_values['byproduct_id']].id, move_finished_values)]
+ elif move_finished_values.get('product_id') == self.product_id.id and move_finished:
+ list_move_finished += [(1, move_finished.id, move_finished_values)]
+ else:
+ # add new entries
+ list_move_finished += [(0, 0, move_finished_values)]
+ self.move_finished_ids = list_move_finished
+
+ def _get_moves_raw_values(self):
+ # Explode the BoM for each MO and build the component move values,
+ # skipping phantom sub-BoM lines and non-stockable (service) products.
+ moves = []
+ for production in self:
+ factor = production.product_uom_id._compute_quantity(production.product_qty, production.bom_id.product_uom_id) / production.bom_id.product_qty
+ boms, lines = production.bom_id.explode(production.product_id, factor, picking_type=production.bom_id.picking_type_id)
+ for bom_line, line_data in lines:
+ # Precedence: (phantom child BoM) OR (product not storable/consumable).
+ if bom_line.child_bom_id and bom_line.child_bom_id.type == 'phantom' or\
+ bom_line.product_id.type not in ['product', 'consu']:
+ continue
+ # Operation comes from the line itself, else from its parent line.
+ operation = bom_line.operation_id.id or line_data['parent_line'] and line_data['parent_line'].operation_id.id
+ moves.append(production._get_move_raw_values(
+ bom_line.product_id,
+ line_data['qty'],
+ bom_line.product_uom_id,
+ operation,
+ bom_line
+ ))
+ return moves
+
+ def _get_move_raw_values(self, product_id, product_uom_qty, product_uom, operation_id=False, bom_line=False):
+ # Build the stock.move values for one component of this MO.
+ # :param product_id: product.product record to consume
+ # :param product_uom_qty: quantity to consume, in `product_uom`
+ # :param product_uom: uom.uom record of the quantity
+ # :param operation_id: optional operation id consuming the component
+ # :param bom_line: optional mrp.bom.line record originating the move
+ # :return: dict of stock.move create() values
+ source_location = self.location_src_id
+ data = {
+ 'sequence': bom_line.sequence if bom_line else 10,
+ 'name': self.name,
+ 'date': self.date_planned_start,
+ 'date_deadline': self.date_planned_start,
+ 'bom_line_id': bom_line.id if bom_line else False,
+ 'picking_type_id': self.picking_type_id.id,
+ 'product_id': product_id.id,
+ 'product_uom_qty': product_uom_qty,
+ 'product_uom': product_uom.id,
+ 'location_id': source_location.id,
+ 'location_dest_id': self.product_id.with_company(self.company_id).property_stock_production.id,
+ 'raw_material_production_id': self.id,
+ 'company_id': self.company_id.id,
+ 'operation_id': operation_id,
+ 'price_unit': product_id.standard_price,
+ 'procure_method': 'make_to_stock',
+ 'origin': self.name,
+ 'state': 'draft',
+ 'warehouse_id': source_location.get_warehouse().id,
+ 'group_id': self.procurement_group_id.id,
+ 'propagate_cancel': self.propagate_cancel,
+ }
+ return data
+
+ def _set_qty_producing(self):
+ # Force qty_producing to 1 unit for serial-tracked products, then spread
+ # the remaining quantity to produce over each move via its unit factor.
+ if self.product_id.tracking == 'serial':
+ qty_producing_uom = self.product_uom_id._compute_quantity(self.qty_producing, self.product_id.uom_id, rounding_method='HALF-UP')
+ if qty_producing_uom != 1:
+ self.qty_producing = self.product_id.uom_id._compute_quantity(1, self.product_uom_id, rounding_method='HALF-UP')
+
+ # Components and by-products (main finished product excluded).
+ for move in (self.move_raw_ids | self.move_finished_ids.filtered(lambda m: m.product_id != self.product_id)):
+ if move._should_bypass_set_qty_producing() or not move.product_uom:
+ continue
+ new_qty = float_round((self.qty_producing - self.qty_produced) * move.unit_factor, precision_rounding=move.product_uom.rounding)
+ # Reset open line quantities before redistributing the new total.
+ move.move_line_ids.filtered(lambda ml: ml.state not in ('done', 'cancel')).qty_done = 0
+ move.move_line_ids = move._set_quantity_done_prepare_vals(new_qty)
+
+ def _update_raw_moves(self, factor):
+ # Scale every open component move by `factor`; moves scaled to zero are
+ # cancelled and unlinked (refused if they already consumed something).
+ # :param factor: multiplicative factor applied to product_uom_qty
+ # :return: list of (move, old_qty, new_qty) tuples for the updated moves
+ self.ensure_one()
+ update_info = []
+ move_to_unlink = self.env['stock.move']
+ for move in self.move_raw_ids.filtered(lambda m: m.state not in ('done', 'cancel')):
+ old_qty = move.product_uom_qty
+ new_qty = old_qty * factor
+ if new_qty > 0:
+ move.write({'product_uom_qty': new_qty})
+ move._action_assign()
+ update_info.append((move, old_qty, new_qty))
+ else:
+ if move.quantity_done > 0:
+ raise UserError(_('Lines need to be deleted, but can not as you still have some quantities to consume in them. '))
+ move._action_cancel()
+ move_to_unlink |= move
+ move_to_unlink.unlink()
+ return update_info
+
+ def _get_ready_to_produce_state(self):
+ """ Return 'assigned' if all components needed by the first operation of
+ the BoM are reserved; otherwise return 'confirmed'.
+ """
+ self.ensure_one()
+ first_operation = self.bom_id.operation_ids[0]
+ # With a single operation all components belong to it.
+ if len(self.bom_id.operation_ids) == 1:
+ moves_in_first_operation = self.move_raw_ids
+ else:
+ moves_in_first_operation = self.move_raw_ids.filtered(lambda move: move.operation_id == first_operation)
+ # Ignore moves whose BoM line is skipped for this product variant.
+ moves_in_first_operation = moves_in_first_operation.filtered(
+ lambda move: move.bom_line_id and
+ not move.bom_line_id._skip_bom_line(self.product_id)
+ )
+
+ if all(move.state == 'assigned' for move in moves_in_first_operation):
+ return 'assigned'
+ return 'confirmed'
+
+ def _autoconfirm_production(self):
+ """Confirm the draft moves that were added to `self` after the initial
+ call to `action_confirm` (moves flagged `additional`), then re-confirm
+ the pending work orders.
+ """
+ moves_to_confirm = self.env['stock.move']
+ for production in self:
+ if production.state in ('done', 'cancel'):
+ continue
+ additional_moves = production.move_raw_ids.filtered(
+ lambda move: move.state == 'draft' and move.additional
+ )
+ additional_moves.write({
+ 'group_id': production.procurement_group_id.id,
+ })
+ additional_moves._adjust_procure_method()
+ moves_to_confirm |= additional_moves
+ additional_byproducts = production.move_finished_ids.filtered(
+ lambda move: move.state == 'draft' and move.additional
+ )
+ moves_to_confirm |= additional_byproducts
+
+ if moves_to_confirm:
+ moves_to_confirm._action_confirm()
+ # run scheduler for moves forecasted to not have enough in stock
+ moves_to_confirm._trigger_scheduler()
+
+ self.workorder_ids.filtered(lambda w: w.state not in ['done', 'cancel'])._action_confirm()
+
+ def action_view_mrp_production_childs(self):
+ # Window action showing the child MOs generated by this one (followed
+ # through the procurement group's moves): form view for a single child,
+ # list otherwise.
+ self.ensure_one()
+ mrp_production_ids = self.procurement_group_id.stock_move_ids.created_production_id.procurement_group_id.mrp_production_ids.ids
+ action = {
+ 'res_model': 'mrp.production',
+ 'type': 'ir.actions.act_window',
+ }
+ if len(mrp_production_ids) == 1:
+ action.update({
+ 'view_mode': 'form',
+ 'res_id': mrp_production_ids[0],
+ })
+ else:
+ action.update({
+ 'name': _("%s Child MO's") % self.name,
+ 'domain': [('id', 'in', mrp_production_ids)],
+ 'view_mode': 'tree,form',
+ })
+ return action
+
+ def action_view_mrp_production_sources(self):
+ # Window action showing the MOs that this one feeds (destination moves'
+ # procurement groups): form view for one record, list otherwise.
+ self.ensure_one()
+ mrp_production_ids = self.procurement_group_id.mrp_production_ids.move_dest_ids.group_id.mrp_production_ids.ids
+ action = {
+ 'res_model': 'mrp.production',
+ 'type': 'ir.actions.act_window',
+ }
+ if len(mrp_production_ids) == 1:
+ action.update({
+ 'view_mode': 'form',
+ 'res_id': mrp_production_ids[0],
+ })
+ else:
+ action.update({
+ 'name': _("MO Generated by %s") % self.name,
+ 'domain': [('id', 'in', mrp_production_ids)],
+ 'view_mode': 'tree,form',
+ })
+ return action
+
+ def action_view_mrp_production_backorders(self):
+ # Window action listing all MOs of the same procurement group (the
+ # backorder chain this MO belongs to).
+ backorder_ids = self.procurement_group_id.mrp_production_ids.ids
+ return {
+ 'res_model': 'mrp.production',
+ 'type': 'ir.actions.act_window',
+ 'name': _("Backorder MO's"),
+ 'domain': [('id', 'in', backorder_ids)],
+ 'view_mode': 'tree,form',
+ }
+
+ def action_generate_serial(self):
+ # Create a new lot/serial for the finished product, assign it to the
+ # existing finished move lines, and (for serial tracking) clamp the
+ # produced quantity to one unit.
+ self.ensure_one()
+ self.lot_producing_id = self.env['stock.production.lot'].create({
+ 'product_id': self.product_id.id,
+ 'company_id': self.company_id.id
+ })
+ if self.move_finished_ids.filtered(lambda m: m.product_id == self.product_id).move_line_ids:
+ self.move_finished_ids.filtered(lambda m: m.product_id == self.product_id).move_line_ids.lot_id = self.lot_producing_id
+ if self.product_id.tracking == 'serial':
+ self._set_qty_producing()
+
+ def _action_generate_immediate_wizard(self):
+ # Return the action opening the "Immediate Production?" wizard for the
+ # MOs in `self` (asks the user to confirm producing everything at once).
+ view = self.env.ref('mrp.view_immediate_production')
+ return {
+ 'name': _('Immediate Production?'),
+ 'type': 'ir.actions.act_window',
+ 'view_mode': 'form',
+ 'res_model': 'mrp.immediate.production',
+ 'views': [(view.id, 'form')],
+ 'view_id': view.id,
+ 'target': 'new',
+ 'context': dict(self.env.context, default_mo_ids=[(4, mo.id) for mo in self]),
+ }
+
+ def action_confirm(self):
+ # Confirm the MOs: take the consumption mode from the BoM, require at
+ # least one component, normalize UoM for serial tracking, confirm all
+ # moves and work orders, then trigger the scheduler for shortages.
+ self._check_company()
+ for production in self:
+ if production.bom_id:
+ production.consumption = production.bom_id.consumption
+ if not production.move_raw_ids:
+ raise UserError(_("Add some materials to consume before marking this MO as to do."))
+ # In case of Serial number tracking, force the UoM to the UoM of product
+ if production.product_tracking == 'serial' and production.product_uom_id != production.product_id.uom_id:
+ production.write({
+ 'product_qty': production.product_uom_id._compute_quantity(production.product_qty, production.product_id.uom_id),
+ 'product_uom_id': production.product_id.uom_id
+ })
+ for move_finish in production.move_finished_ids.filtered(lambda m: m.product_id == production.product_id):
+ move_finish.write({
+ 'product_uom_qty': move_finish.product_uom._compute_quantity(move_finish.product_uom_qty, move_finish.product_id.uom_id),
+ 'product_uom': move_finish.product_id.uom_id
+ })
+ production.move_raw_ids._adjust_procure_method()
+ (production.move_raw_ids | production.move_finished_ids)._action_confirm()
+ production.workorder_ids._action_confirm()
+
+ # run scheduler for moves forecasted to not have enough in stock
+ self.move_raw_ids._trigger_scheduler()
+ return True
+
+ def action_assign(self):
+ # Reserve stock for the component moves of every MO ("Check availability").
+ for production in self:
+ production.move_raw_ids._action_assign()
+ return True
+
+ def button_plan(self):
+ """ Confirm the draft orders among the unplanned ones, then plan the
+ work orders of every unplanned manufacturing order. """
+ orders_to_plan = self.filtered(lambda order: not order.is_planned)
+ orders_to_confirm = orders_to_plan.filtered(lambda mo: mo.state == 'draft')
+ orders_to_confirm.action_confirm()
+ for order in orders_to_plan:
+ order._plan_workorders()
+ return True
+
+ def _plan_workorders(self, replan=False):
+ """ Plan all the production's workorders depending on the workcenters
+ work schedule.
+
+ :param replan: If it is a replan, only ready and pending workorder will be take in account
+ :type replan: bool.
+ """
+ self.ensure_one()
+
+ if not self.workorder_ids:
+ return
+ # Schedule all work orders (new ones and those already created)
+ qty_to_produce = max(self.product_qty - self.qty_produced, 0)
+ qty_to_produce = self.product_uom_id._compute_quantity(qty_to_produce, self.product_id.uom_id)
+ start_date = max(self.date_planned_start, datetime.datetime.now())
+ if replan:
+ workorder_ids = self.workorder_ids.filtered(lambda wo: wo.state in ['ready', 'pending'])
+ # We plan the manufacturing order according to its `date_planned_start`, but if
+ # `date_planned_start` is in the past, we plan it as soon as possible.
+ workorder_ids.leave_id.unlink()
+ else:
+ workorder_ids = self.workorder_ids.filtered(lambda wo: not wo.date_planned_start)
+ for workorder in workorder_ids:
+ # Candidate workcenters: the nominal one plus its alternatives.
+ workcenters = workorder.workcenter_id | workorder.workcenter_id.alternative_workcenter_ids
+
+ best_finished_date = datetime.datetime.max
+ vals = {}
+ for workcenter in workcenters:
+ # compute theoretical duration
+ if workorder.workcenter_id == workcenter:
+ duration_expected = workorder.duration_expected
+ else:
+ duration_expected = workorder._get_duration_expected(alternative_workcenter=workcenter)
+
+ from_date, to_date = workcenter._get_first_available_slot(start_date, duration_expected)
+ # If the workcenter is unavailable, try planning on the next one
+ if not from_date:
+ continue
+ # Check if this workcenter is better than the previous ones
+ if to_date and to_date < best_finished_date:
+ best_start_date = from_date
+ best_finished_date = to_date
+ best_workcenter = workcenter
+ vals = {
+ 'workcenter_id': workcenter.id,
+ 'duration_expected': duration_expected,
+ }
+
+ # If none of the workcenter are available, raise
+ if best_finished_date == datetime.datetime.max:
+ raise UserError(_('Impossible to plan the workorder. Please check the workcenter availabilities.'))
+
+ # Instantiate start_date for the next workorder planning
+ if workorder.next_work_order_id:
+ start_date = best_finished_date
+
+ # Create leave on chosen workcenter calendar
+ leave = self.env['resource.calendar.leaves'].create({
+ 'name': workorder.display_name,
+ 'calendar_id': best_workcenter.resource_calendar_id.id,
+ 'date_from': best_start_date,
+ 'date_to': best_finished_date,
+ 'resource_id': best_workcenter.resource_id.id,
+ 'time_type': 'other'
+ })
+ vals['leave_id'] = leave.id
+ workorder.write(vals)
+ # Align the MO dates on the planned work orders (force_date bypasses the
+ # date guards in write()).
+ self.with_context(force_date=True).write({
+ 'date_planned_start': self.workorder_ids[0].date_planned_start,
+ 'date_planned_finished': self.workorder_ids[-1].date_planned_finished
+ })
+
+ def button_unplan(self):
+ # Unplan the MO: refuse if any work order is done or started, otherwise
+ # remove the workcenter calendar leaves and clear the planned dates.
+ if any(wo.state == 'done' for wo in self.workorder_ids):
+ raise UserError(_("Some work orders are already done, you cannot unplan this manufacturing order."))
+ elif any(wo.state == 'progress' for wo in self.workorder_ids):
+ raise UserError(_("Some work orders have already started, you cannot unplan this manufacturing order."))
+
+ self.workorder_ids.leave_id.unlink()
+ self.workorder_ids.write({
+ 'date_planned_start': False,
+ 'date_planned_finished': False,
+ })
+
+ def _get_consumption_issues(self):
+ """Compare the quantity consumed of the components, the expected quantity
+ on the BoM and the consumption parameter on the order.
+
+ :return: list of tuples (order_id, product_id, consumed_qty, expected_qty) where the
+ consumption isn't honored. order_id and product_id are recordset of mrp.production
+ and product.product respectively
+ :rtype: list
+ """
+ issues = []
+ if self.env.context.get('skip_consumption', False) or self.env.context.get('skip_immediate', False):
+ return issues
+ for order in self:
+ # Flexible consumption (or no BoM lines) never raises issues.
+ if order.consumption == 'flexible' or not order.bom_id or not order.bom_id.bom_line_ids:
+ continue
+ expected_move_values = order._get_moves_raw_values()
+ expected_qty_by_product = defaultdict(float)
+ for move_values in expected_move_values:
+ move_product = self.env['product.product'].browse(move_values['product_id'])
+ move_uom = self.env['uom.uom'].browse(move_values['product_uom'])
+ move_product_qty = move_uom._compute_quantity(move_values['product_uom_qty'], move_product.uom_id)
+ # Pro-rate the BoM quantity by the fraction actually being produced.
+ expected_qty_by_product[move_product] += move_product_qty * order.qty_producing / order.product_qty
+
+ done_qty_by_product = defaultdict(float)
+ for move in order.move_raw_ids:
+ qty_done = move.product_uom._compute_quantity(move.quantity_done, move.product_id.uom_id)
+ rounding = move.product_id.uom_id.rounding
+ # Consumed a product the BoM does not expect at all.
+ if not (move.product_id in expected_qty_by_product or float_is_zero(qty_done, precision_rounding=rounding)):
+ issues.append((order, move.product_id, qty_done, 0.0))
+ continue
+ done_qty_by_product[move.product_id] += qty_done
+
+ for product, qty_to_consume in expected_qty_by_product.items():
+ qty_done = done_qty_by_product.get(product, 0.0)
+ if float_compare(qty_to_consume, qty_done, precision_rounding=product.uom_id.rounding) != 0:
+ issues.append((order, product, qty_done, qty_to_consume))
+
+ return issues
+
+ def _action_generate_consumption_wizard(self, consumption_issues):
+ # Open the consumption-warning wizard pre-filled with one line per issue.
+ # :param consumption_issues: list of (order, product, consumed_qty,
+ #     expected_qty) tuples as returned by `_get_consumption_issues`
+ ctx = self.env.context.copy()
+ lines = []
+ for order, product_id, consumed_qty, expected_qty in consumption_issues:
+ lines.append((0, 0, {
+ 'mrp_production_id': order.id,
+ 'product_id': product_id.id,
+ 'consumption': order.consumption,
+ 'product_uom_id': product_id.uom_id.id,
+ 'product_consumed_qty_uom': consumed_qty,
+ 'product_expected_qty_uom': expected_qty
+ }))
+ ctx.update({'default_mrp_production_ids': self.ids, 'default_mrp_consumption_warning_line_ids': lines})
+ action = self.env["ir.actions.actions"]._for_xml_id("mrp.action_mrp_consumption_warning")
+ action['context'] = ctx
+ return action
+
+ def _get_quantity_produced_issues(self):
+ # Return the orders that still have a non-zero quantity to backorder
+ # (skipped entirely when the 'skip_backorder' context flag is set).
+ quantity_issues = []
+ if self.env.context.get('skip_backorder', False):
+ return quantity_issues
+ for order in self:
+ if not float_is_zero(order._get_quantity_to_backorder(), precision_rounding=order.product_uom_id.rounding):
+ quantity_issues.append(order)
+ return quantity_issues
+
+ def _action_generate_backorder_wizard(self, quantity_issues):
+ # Open the backorder wizard pre-filled with one line per under-produced MO.
+ # :param quantity_issues: mrp.production records from `_get_quantity_produced_issues`
+ ctx = self.env.context.copy()
+ lines = []
+ for order in quantity_issues:
+ lines.append((0, 0, {
+ 'mrp_production_id': order.id,
+ 'to_backorder': True
+ }))
+ ctx.update({'default_mrp_production_ids': self.ids, 'default_mrp_production_backorder_line_ids': lines})
+ action = self.env["ir.actions.actions"]._for_xml_id("mrp.action_mrp_production_backorder")
+ action['context'] = ctx
+ return action
+
+ def action_cancel(self):
+ """ Cancels production order, unfinished stock moves and set procurement
+ orders in exception """
+ # Shortcut: without component moves there is nothing to cancel downstream.
+ if not self.move_raw_ids:
+ self.state = 'cancel'
+ return True
+ self._action_cancel()
+ return True
+
+ def _action_cancel(self):
+ # Core cancellation: collect impacted source documents for logging,
+ # cancel work orders, open moves and pickings, then log exceptions on
+ # the parent documents.
+ documents_by_production = {}
+ for production in self:
+ documents = defaultdict(list)
+ # NOTE(review): iterates `self.move_raw_ids` inside the per-production
+ # loop — on multi-record `self` each production logs against the whole
+ # set; confirm `production.move_raw_ids` was not intended.
+ for move_raw_id in self.move_raw_ids.filtered(lambda m: m.state not in ('done', 'cancel')):
+ iterate_key = self._get_document_iterate_key(move_raw_id)
+ if iterate_key:
+ document = self.env['stock.picking']._log_activity_get_documents({move_raw_id: (move_raw_id.product_uom_qty, 0)}, iterate_key, 'UP')
+ for key, value in document.items():
+ documents[key] += [value]
+ if documents:
+ documents_by_production[production] = documents
+ # log an activity on Parent MO if child MO is cancelled.
+ finish_moves = production.move_finished_ids.filtered(lambda x: x.state not in ('done', 'cancel'))
+ if finish_moves:
+ production._log_downside_manufactured_quantity({finish_move: (production.product_uom_qty, 0.0) for finish_move in finish_moves}, cancel=True)
+
+ self.workorder_ids.filtered(lambda x: x.state not in ['done', 'cancel']).action_cancel()
+ finish_moves = self.move_finished_ids.filtered(lambda x: x.state not in ('done', 'cancel'))
+ raw_moves = self.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel'))
+
+ (finish_moves | raw_moves)._action_cancel()
+ picking_ids = self.picking_ids.filtered(lambda x: x.state not in ('done', 'cancel'))
+ picking_ids.action_cancel()
+
+ for production, documents in documents_by_production.items():
+ filtered_documents = {}
+ for (parent, responsible), rendering_context in documents.items():
+ if not parent or parent._name == 'stock.picking' and parent.state == 'cancel' or parent == production:
+ continue
+ filtered_documents[(parent, responsible)] = rendering_context
+ production._log_manufacture_exception(filtered_documents, cancel=True)
+
+ # In case of a flexible BOM, we don't know from the state of the moves if the MO should
+ # remain in progress or done. Indeed, if all moves are done/cancel but the quantity produced
+ # is lower than expected, it might mean:
+ # - we have used all components but we still want to produce the quantity expected
+ # - we have used all components and we won't be able to produce the last units
+ #
+ # However, if the user clicks on 'Cancel', it is expected that the MO is either done or
+ # canceled. If the MO is still in progress at this point, it means that the move raws
+ # are either all done or a mix of done / canceled => the MO should be done.
+ self.filtered(lambda p: p.state not in ['done', 'cancel'] and p.bom_id.consumption == 'flexible').write({'state': 'done'})
+
+ return True
+
+ def _get_document_iterate_key(self, move_raw_id):
+ # Field name used to walk up the move chain when collecting impacted
+ # documents (falsy when the move has no origin moves).
+ return move_raw_id.move_orig_ids and 'move_orig_ids' or False
+
+ def _cal_price(self, consumed_moves):
+ # Extension hook: cost computation of the finished product from the
+ # consumed moves. No-op here; overridden by accounting/valuation modules.
+ self.ensure_one()
+ return True
+
+ def _post_inventory(self, cancel_backorder=False):
+ for order in self:
+ moves_not_to_do = order.move_raw_ids.filtered(lambda x: x.state == 'done')
+ moves_to_do = order.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel'))
+ for move in moves_to_do.filtered(lambda m: m.product_qty == 0.0 and m.quantity_done > 0):
+ move.product_uom_qty = move.quantity_done
+ # MRP do not merge move, catch the result of _action_done in order
+ # to get extra moves.
+ moves_to_do = moves_to_do._action_done()
+ moves_to_do = order.move_raw_ids.filtered(lambda x: x.state == 'done') - moves_not_to_do
+
+ finish_moves = order.move_finished_ids.filtered(lambda m: m.product_id == order.product_id and m.state not in ('done', 'cancel'))
+ # the finish move can already be completed by the workorder.
+ if not finish_moves.quantity_done:
+ finish_moves.quantity_done = float_round(order.qty_producing - order.qty_produced, precision_rounding=order.product_uom_id.rounding, rounding_method='HALF-UP')
+ finish_moves.move_line_ids.lot_id = order.lot_producing_id
+ order._cal_price(moves_to_do)
+
+ moves_to_finish = order.move_finished_ids.filtered(lambda x: x.state not in ('done', 'cancel'))
+ moves_to_finish = moves_to_finish._action_done(cancel_backorder=cancel_backorder)
+ order.action_assign()
+ consume_move_lines = moves_to_do.mapped('move_line_ids')
+ order.move_finished_ids.move_line_ids.consume_line_ids = [(6, 0, consume_move_lines.ids)]
+ return True
+
+ @api.model
+ def _get_name_backorder(self, name, sequence):
+ if not sequence:
+ return name
+ seq_back = "-" + "0" * (SIZE_BACK_ORDER_NUMERING - 1 - int(math.log10(sequence))) + str(sequence)
+ regex = re.compile(r"-\d+$")
+ if regex.search(name) and sequence > 1:
+ return regex.sub(seq_back, name)
+ return name + seq_back
+
+    def _get_backorder_mo_vals(self):
+        """Return the ``copy()`` defaults used to create this MO's backorder:
+        next sequenced name, remaining quantity to produce, same procurement
+        group, and no moves (the caller reassigns or re-creates them).
+        """
+        self.ensure_one()
+        # Next suffix is one past the highest sequence already used in the
+        # procurement group (all backorders of an MO share the group).
+        next_seq = max(self.procurement_group_id.mrp_production_ids.mapped("backorder_sequence"))
+        return {
+            'name': self._get_name_backorder(self.name, next_seq + 1),
+            'backorder_sequence': next_seq + 1,
+            'procurement_group_id': self.procurement_group_id.id,
+            'move_raw_ids': None,
+            'move_finished_ids': None,
+            'product_qty': self._get_quantity_to_backorder(),
+            'lot_producing_id': False,
+            'origin': self.origin
+        }
+
+    def _generate_backorder_productions(self, close_mo=True):
+        """Create one backorder MO per production for the quantity not yet
+        produced and return the recordset of created backorders.
+
+        :param bool close_mo: when True the open moves of the original MO are
+            simply re-attached to the backorder; when False the moves are
+            split and the remaining quantity is moved to the backorder while
+            the original keeps its produced part.
+        """
+        backorders = self.env['mrp.production']
+        for production in self:
+            if production.backorder_sequence == 0:  # Activate backorder naming
+                production.backorder_sequence = 1
+            production.name = self._get_name_backorder(production.name, production.backorder_sequence)
+            backorder_mo = production.copy(default=production._get_backorder_mo_vals())
+            if close_mo:
+                # Re-parent all still-open moves onto the backorder.
+                production.move_raw_ids.filtered(lambda m: m.state not in ('done', 'cancel')).write({
+                    'raw_material_production_id': backorder_mo.id,
+                })
+                production.move_finished_ids.filtered(lambda m: m.state not in ('done', 'cancel')).write({
+                    'production_id': backorder_mo.id,
+                })
+            else:
+                # Split each planned move: the part exceeding what is being
+                # produced now goes to the backorder MO.
+                new_moves_vals = []
+                for move in production.move_raw_ids | production.move_finished_ids:
+                    if not move.additional:
+                        qty_to_split = move.product_uom_qty - move.unit_factor * production.qty_producing
+                        qty_to_split = move.product_uom._compute_quantity(qty_to_split, move.product_id.uom_id, rounding_method='HALF-UP')
+                        move_vals = move._split(qty_to_split)
+                        if not move_vals:
+                            continue
+                        if move.raw_material_production_id:
+                            move_vals[0]['raw_material_production_id'] = backorder_mo.id
+                        else:
+                            move_vals[0]['production_id'] = backorder_mo.id
+                        new_moves_vals.append(move_vals[0])
+                new_moves = self.env['stock.move'].create(new_moves_vals)
+            backorders |= backorder_mo
+            # Carry the workorder progress over to the backorder's workorders.
+            for old_wo, wo in zip(production.workorder_ids, backorder_mo.workorder_ids):
+                wo.qty_produced = max(old_wo.qty_produced - old_wo.qty_producing, 0)
+                if wo.product_tracking == 'serial':
+                    wo.qty_producing = 1
+                else:
+                    wo.qty_producing = wo.qty_remaining
+                if wo.qty_producing == 0:
+                    wo.action_cancel()
+
+            # We need to adapt `duration_expected` on both the original workorders and their
+            # backordered workorders. To do that, we use the original `duration_expected` and the
+            # ratio of the quantity really produced and the quantity to produce.
+            ratio = production.qty_producing / production.product_qty
+            for workorder in production.workorder_ids:
+                workorder.duration_expected = workorder.duration_expected * ratio
+            for workorder in backorder_mo.workorder_ids:
+                workorder.duration_expected = workorder.duration_expected * (1 - ratio)
+
+        # As we have split the moves before validating them, we need to 'remove' the excess reservation
+        if not close_mo:
+            self.move_raw_ids.filtered(lambda m: not m.additional)._do_unreserve()
+            self.move_raw_ids.filtered(lambda m: not m.additional)._action_assign()
+        # Confirm only productions with remaining components
+        backorders.filtered(lambda mo: mo.move_raw_ids).action_confirm()
+        backorders.filtered(lambda mo: mo.move_raw_ids).action_assign()
+
+        # Remove the serial move line without reserved quantity. Post inventory will assigned all the non done moves
+        # So those move lines are duplicated.
+        backorders.move_raw_ids.move_line_ids.filtered(lambda ml: ml.product_id.tracking == 'serial' and ml.product_qty == 0).unlink()
+        backorders.move_raw_ids._recompute_state()
+
+        return backorders
+
+    def button_mark_done(self):
+        """'Mark as Done' entry point for manufacturing orders.
+
+        Runs the sanity checks and pre-validation (which may return a wizard
+        action), posts inventory, creates backorders for the MOs listed in
+        the ``mo_ids_to_backorder`` context key, finalizes moves and
+        workorders, then returns True or an action on the backorder(s).
+        """
+        self._button_mark_done_sanity_checks()
+
+        if not self.env.context.get('button_mark_done_production_ids'):
+            self = self.with_context(button_mark_done_production_ids=self.ids)
+        res = self._pre_button_mark_done()
+        if res is not True:
+            # A wizard (immediate / consumption / backorder) must be shown
+            # before we can proceed.
+            return res
+
+        if self.env.context.get('mo_ids_to_backorder'):
+            productions_to_backorder = self.browse(self.env.context['mo_ids_to_backorder'])
+            productions_not_to_backorder = self - productions_to_backorder
+        else:
+            productions_not_to_backorder = self
+            productions_to_backorder = self.env['mrp.production']
+
+        self.workorder_ids.button_finish()
+
+        productions_not_to_backorder._post_inventory(cancel_backorder=True)
+        productions_to_backorder._post_inventory(cancel_backorder=False)
+        backorders = productions_to_backorder._generate_backorder_productions()
+
+        # if completed products make other confirmed/partially_available moves available, assign them
+        done_move_finished_ids = (productions_to_backorder.move_finished_ids | productions_not_to_backorder.move_finished_ids).filtered(lambda m: m.state == 'done')
+        done_move_finished_ids._trigger_assign()
+
+        # Moves without quantity done are not posted => set them as done instead of canceling. In
+        # case the user edits the MO later on and sets some consumed quantity on those, we do not
+        # want the move lines to be canceled.
+        (productions_not_to_backorder.move_raw_ids | productions_not_to_backorder.move_finished_ids).filtered(lambda x: x.state not in ('done', 'cancel')).write({
+            'state': 'done',
+            'product_uom_qty': 0.0,
+        })
+
+        for production in self:
+            production.write({
+                'date_finished': fields.Datetime.now(),
+                'product_qty': production.qty_produced,
+                'priority': '0',
+                'is_locked': True,
+            })
+
+        for workorder in self.workorder_ids.filtered(lambda w: w.state not in ('done', 'cancel')):
+            workorder.duration_expected = workorder._get_duration_expected()
+
+        if not backorders:
+            if self.env.context.get('from_workorder'):
+                # Coming from the tablet view: reopen the MO form.
+                return {
+                    'type': 'ir.actions.act_window',
+                    'res_model': 'mrp.production',
+                    'views': [[self.env.ref('mrp.mrp_production_form_view').id, 'form']],
+                    'res_id': self.id,
+                    'target': 'main',
+                }
+            return True
+        # Strip default_* / reset skip_* keys so they do not leak into the
+        # backorder action's context.
+        context = self.env.context.copy()
+        context = {k: v for k, v in context.items() if not k.startswith('default_')}
+        for k, v in context.items():
+            if k.startswith('skip_'):
+                context[k] = False
+        action = {
+            'res_model': 'mrp.production',
+            'type': 'ir.actions.act_window',
+            'context': dict(context, mo_ids_to_backorder=None)
+        }
+        if len(backorders) == 1:
+            action.update({
+                'view_mode': 'form',
+                'res_id': backorders[0].id,
+            })
+        else:
+            action.update({
+                'name': _("Backorder MO"),
+                'domain': [('id', 'in', backorders.ids)],
+                'view_mode': 'tree,form',
+            })
+        return action
+
+    def _pre_button_mark_done(self):
+        """Pre-validation chain run before marking MOs as done.
+
+        Returns True when processing can continue, otherwise an action
+        dict opening the relevant wizard (immediate production, consumption
+        warning, or backorder creation).
+        """
+        productions_to_immediate = self._check_immediate()
+        if productions_to_immediate:
+            return productions_to_immediate._action_generate_immediate_wizard()
+
+        for production in self:
+            if float_is_zero(production.qty_producing, precision_rounding=production.product_uom_id.rounding):
+                raise UserError(_('The quantity to produce must be positive!'))
+            if not any(production.move_raw_ids.mapped('quantity_done')):
+                raise UserError(_("You must indicate a non-zero amount consumed for at least one of your components"))
+
+        consumption_issues = self._get_consumption_issues()
+        if consumption_issues:
+            return self._action_generate_consumption_wizard(consumption_issues)
+
+        quantity_issues = self._get_quantity_produced_issues()
+        if quantity_issues:
+            return self._action_generate_backorder_wizard(quantity_issues)
+        return True
+
+ def _button_mark_done_sanity_checks(self):
+ self._check_company()
+ for order in self:
+ order._check_sn_uniqueness()
+
+ def do_unreserve(self):
+ self.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel'))._do_unreserve()
+ return True
+
+ def button_unreserve(self):
+ self.ensure_one()
+ self.do_unreserve()
+ return True
+
+ def button_scrap(self):
+ self.ensure_one()
+ return {
+ 'name': _('Scrap'),
+ 'view_mode': 'form',
+ 'res_model': 'stock.scrap',
+ 'view_id': self.env.ref('stock.stock_scrap_form_view2').id,
+ 'type': 'ir.actions.act_window',
+ 'context': {'default_production_id': self.id,
+ 'product_ids': (self.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel')) | self.move_finished_ids.filtered(lambda x: x.state == 'done')).mapped('product_id').ids,
+ 'default_company_id': self.company_id.id
+ },
+ 'target': 'new',
+ }
+
+ def action_see_move_scrap(self):
+ self.ensure_one()
+ action = self.env["ir.actions.actions"]._for_xml_id("stock.action_stock_scrap")
+ action['domain'] = [('production_id', '=', self.id)]
+ action['context'] = dict(self._context, default_origin=self.name)
+ return action
+
+ @api.model
+ def get_empty_list_help(self, help):
+ self = self.with_context(
+ empty_list_help_document_name=_("manufacturing order"),
+ )
+ return super(MrpProduction, self).get_empty_list_help(help)
+
+    def _log_downside_manufactured_quantity(self, moves_modification, cancel=False):
+        """Log an activity on the impacted documents when the manufactured
+        quantity is decreased (or the MO canceled), so the responsible users
+        are warned that downstream pickings may receive less than expected.
+
+        :param moves_modification: mapping of moves to their (old, new)
+            quantities, as built by the stock framework's logging helpers.
+        :param bool cancel: whether the change comes from a cancellation.
+        """
+
+        def _keys_in_sorted(move):
+            """ sort by picking and the responsible for the product the
+            move.
+            """
+            return (move.picking_id.id, move.product_id.responsible_id.id)
+
+        def _keys_in_groupby(move):
+            """ group by picking and the responsible for the product the
+            move.
+            """
+            return (move.picking_id, move.product_id.responsible_id)
+
+        def _render_note_exception_quantity_mo(rendering_context):
+            # Render the 'exception on MO' template for the activity note.
+            values = {
+                'production_order': self,
+                'order_exceptions': rendering_context,
+                'impacted_pickings': False,
+                'cancel': cancel
+            }
+            return self.env.ref('mrp.exception_on_mo')._render(values=values)
+
+        documents = self.env['stock.picking']._log_activity_get_documents(moves_modification, 'move_dest_ids', 'DOWN', _keys_in_sorted, _keys_in_groupby)
+        documents = self.env['stock.picking']._less_quantities_than_expected_add_documents(moves_modification, documents)
+        self.env['stock.picking']._log_activity(_render_note_exception_quantity_mo, documents)
+
+    def _log_manufacture_exception(self, documents, cancel=False):
+        """Log an 'exception on MO' activity on each impacted document.
+
+        :param documents: mapping of (parent document, responsible) to the
+            rendering contexts collected while the moves were modified.
+        :param bool cancel: whether the exception comes from a cancellation.
+        """
+
+        def _render_note_exception_quantity_mo(rendering_context):
+            # Aggregate the exceptions and the chain of visited records,
+            # then derive the still-open pickings impacted by the change.
+            visited_objects = []
+            order_exceptions = {}
+            for exception in rendering_context:
+                order_exception, visited = exception
+                order_exceptions.update(order_exception)
+                visited_objects += visited
+            visited_objects = self.env[visited_objects[0]._name].concat(*visited_objects)
+            impacted_object = []
+            if visited_objects and visited_objects._name == 'stock.move':
+                visited_objects |= visited_objects.mapped('move_orig_ids')
+                impacted_object = visited_objects.filtered(lambda m: m.state not in ('done', 'cancel')).mapped('picking_id')
+            values = {
+                'production_order': self,
+                'order_exceptions': order_exceptions,
+                'impacted_object': impacted_object,
+                'cancel': cancel
+            }
+            return self.env.ref('mrp.exception_on_mo')._render(values=values)
+
+        self.env['stock.picking']._log_activity(_render_note_exception_quantity_mo, documents)
+
+ def button_unbuild(self):
+ self.ensure_one()
+ return {
+ 'name': _('Unbuild: %s', self.product_id.display_name),
+ 'view_mode': 'form',
+ 'res_model': 'mrp.unbuild',
+ 'view_id': self.env.ref('mrp.mrp_unbuild_form_view_simplified').id,
+ 'type': 'ir.actions.act_window',
+ 'context': {'default_product_id': self.product_id.id,
+ 'default_mo_id': self.id,
+ 'default_company_id': self.company_id.id,
+ 'default_location_id': self.location_dest_id.id,
+ 'default_location_dest_id': self.location_src_id.id,
+ 'create': False, 'edit': False},
+ 'target': 'new',
+ }
+
+ @api.model
+ def _prepare_procurement_group_vals(self, values):
+ return {'name': values['name']}
+
+ def _get_quantity_to_backorder(self):
+ self.ensure_one()
+ return max(self.product_qty - self.qty_producing, 0)
+
+    def _check_sn_uniqueness(self):
+        """Alert the user if the serial number has already been consumed/produced.
+
+        Checks, in order: the finished product's serial, each tracked
+        byproduct serial, and each tracked component serial — both against
+        previous productions (allowing serials re-released by an unbuild)
+        and against duplicates within the current production.
+        """
+        if self.product_tracking == 'serial' and self.lot_producing_id:
+            # A serial coming OUT of a production location means it was
+            # already produced once.
+            sml = self.env['stock.move.line'].search_count([
+                ('lot_id', '=', self.lot_producing_id.id),
+                ('location_id.usage', '=', 'production'),
+                ('qty_done', '=', 1),
+                ('state', '=', 'done')
+            ])
+            if sml:
+                raise UserError(_('This serial number for product %s has already been produced', self.product_id.name))
+
+        # Byproducts: tracked finished moves other than the main product.
+        for move in self.move_finished_ids:
+            if move.has_tracking != 'serial' or move.product_id == self.product_id:
+                continue
+            for move_line in move.move_line_ids:
+                domain = [
+                    ('lot_id', '=', move_line.lot_id.id),
+                    ('qty_done', '=', 1),
+                    ('state', '=', 'done')
+                ]
+                message = _('The serial number %(number)s used for byproduct %(product_name)s has already been produced',
+                            number=move_line.lot_id.name,
+                            product_name=move_line.product_id.name)
+                co_prod_move_lines = self.move_finished_ids.move_line_ids.filtered(lambda ml: ml.product_id != self.product_id)
+                domain_unbuild = domain + [
+                    ('production_id', '=', False),
+                    ('location_dest_id.usage', '=', 'production')
+                ]
+
+                # Check presence of same sn in previous productions
+                duplicates = self.env['stock.move.line'].search_count(domain + [
+                    ('location_id.usage', '=', 'production')
+                ])
+                if duplicates:
+                    # Maybe some move lines have been compensated by unbuild
+                    duplicates_unbuild = self.env['stock.move.line'].search_count(domain_unbuild + [
+                        ('move_id.unbuild_id', '!=', False)
+                    ])
+                    if not (duplicates_unbuild and duplicates - duplicates_unbuild == 0):
+                        raise UserError(message)
+                # Check presence of same sn in current production
+                duplicates = co_prod_move_lines.filtered(lambda ml: ml.qty_done and ml.lot_id == move_line.lot_id) - move_line
+                if duplicates:
+                    raise UserError(message)
+
+        # Components: tracked raw moves with a consumed quantity.
+        for move in self.move_raw_ids:
+            if move.has_tracking != 'serial':
+                continue
+            for move_line in move.move_line_ids:
+                if float_is_zero(move_line.qty_done, precision_rounding=move_line.product_uom_id.rounding):
+                    continue
+                domain = [
+                    ('lot_id', '=', move_line.lot_id.id),
+                    ('qty_done', '=', 1),
+                    ('state', '=', 'done')
+                ]
+                message = _('The serial number %(number)s used for component %(component)s has already been consumed',
+                            number=move_line.lot_id.name,
+                            component=move_line.product_id.name)
+                co_prod_move_lines = self.move_raw_ids.move_line_ids
+                domain_unbuild = domain + [
+                    ('production_id', '=', False),
+                    ('location_id.usage', '=', 'production')
+                ]
+
+                # Check presence of same sn in previous productions
+                duplicates = self.env['stock.move.line'].search_count(domain + [
+                    ('location_dest_id.usage', '=', 'production')
+                ])
+                if duplicates:
+                    # Maybe some move lines have been compensated by unbuild
+                    duplicates_unbuild = self.env['stock.move.line'].search_count(domain_unbuild + [
+                        ('move_id.unbuild_id', '!=', False)
+                    ])
+                    if not (duplicates_unbuild and duplicates - duplicates_unbuild == 0):
+                        raise UserError(message)
+                # Check presence of same sn in current production
+                duplicates = co_prod_move_lines.filtered(lambda ml: ml.qty_done and ml.lot_id == move_line.lot_id) - move_line
+                if duplicates:
+                    raise UserError(message)
+
+    def _check_immediate(self):
+        """Return the subset of productions that look like 'immediate'
+        productions: nothing consumed on any open component line and no
+        quantity producing set. Skipped entirely when the caller already
+        confirmed via the ``skip_immediate`` context key.
+        """
+        immediate_productions = self.browse()
+        if self.env.context.get('skip_immediate'):
+            return immediate_productions
+        pd = self.env['decimal.precision'].precision_get('Product Unit of Measure')
+        for production in self:
+            if all(float_is_zero(ml.qty_done, precision_digits=pd) for
+                    ml in production.move_raw_ids.move_line_ids.filtered(lambda m: m.state not in ('done', 'cancel'))
+                   ) and float_is_zero(production.qty_producing, precision_digits=pd):
+                immediate_productions |= production
+        return immediate_productions
diff --git a/addons/mrp/models/mrp_routing.py b/addons/mrp/models/mrp_routing.py
new file mode 100644
index 00000000..04ca748f
--- /dev/null
+++ b/addons/mrp/models/mrp_routing.py
@@ -0,0 +1,69 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models, _
+
+
+class MrpRoutingWorkcenter(models.Model):
+    """An operation (routing line) performed on a work center for a BoM."""
+    _name = 'mrp.routing.workcenter'
+    _description = 'Work Center Usage'
+    _order = 'sequence, id'
+    _check_company_auto = True
+
+    name = fields.Char('Operation', required=True)
+    workcenter_id = fields.Many2one('mrp.workcenter', 'Work Center', required=True, check_company=True)
+    sequence = fields.Integer(
+        'Sequence', default=100,
+        help="Gives the sequence order when displaying a list of routing Work Centers.")
+    bom_id = fields.Many2one(
+        'mrp.bom', 'Bill of Material', check_company=True,
+        index=True, ondelete='cascade',
+        help="The Bill of Material this operation is linked to")
+    company_id = fields.Many2one(
+        'res.company', 'Company', default=lambda self: self.env.company)
+    worksheet_type = fields.Selection([
+        ('pdf', 'PDF'), ('google_slide', 'Google Slide'), ('text', 'Text')],
+        string="Work Sheet", default="text",
+        help="Defines if you want to use a PDF or a Google Slide as work sheet."
+    )
+    note = fields.Text('Description', help="Text worksheet description")
+    worksheet = fields.Binary('PDF')
+    worksheet_google_slide = fields.Char('Google Slide', help="Paste the url of your Google Slide. Make sure the access to the document is public.")
+    time_mode = fields.Selection([
+        ('auto', 'Compute based on tracked time'),
+        ('manual', 'Set duration manually')], string='Duration Computation',
+        default='manual')
+    time_mode_batch = fields.Integer('Based on', default=10)
+    time_cycle_manual = fields.Float(
+        'Manual Duration', default=60,
+        help="Time in minutes:"
+        "- In manual mode, time used"
+        "- In automatic mode, supposed first time when there aren't any work orders yet")
+    time_cycle = fields.Float('Duration', compute="_compute_time_cycle")
+    workorder_count = fields.Integer("# Work Orders", compute="_compute_workorder_count")
+    workorder_ids = fields.One2many('mrp.workorder', 'operation_id', string="Work Orders")
+
+    @api.depends('time_cycle_manual', 'time_mode', 'workorder_ids')
+    def _compute_time_cycle(self):
+        """Expected duration of the operation, in minutes.
+
+        Manual mode uses the user-set value; automatic mode averages the
+        duration per produced unit over the last `time_mode_batch` done work
+        orders, scaled by the work center capacity, falling back on the
+        manual value when no history exists.
+        """
+        manual_ops = self.filtered(lambda operation: operation.time_mode == 'manual')
+        for operation in manual_ops:
+            operation.time_cycle = operation.time_cycle_manual
+        for operation in self - manual_ops:
+            data = self.env['mrp.workorder'].read_group([
+                ('operation_id', '=', operation.id),
+                ('qty_produced', '>', 0),
+                ('state', '=', 'done')], ['operation_id', 'duration', 'qty_produced'], ['operation_id'],
+                limit=operation.time_mode_batch)
+            count_data = dict((item['operation_id'][0], (item['duration'], item['qty_produced'])) for item in data)
+            if count_data.get(operation.id) and count_data[operation.id][1]:
+                operation.time_cycle = (count_data[operation.id][0] / count_data[operation.id][1]) * (operation.workcenter_id.capacity or 1.0)
+            else:
+                operation.time_cycle = operation.time_cycle_manual
+
+    def _compute_workorder_count(self):
+        """Number of done work orders performed with this operation."""
+        data = self.env['mrp.workorder'].read_group([
+            ('operation_id', 'in', self.ids),
+            ('state', '=', 'done')], ['operation_id'], ['operation_id'])
+        count_data = dict((item['operation_id'][0], item['operation_id_count']) for item in data)
+        for operation in self:
+            operation.workorder_count = count_data.get(operation.id, 0)
diff --git a/addons/mrp/models/mrp_unbuild.py b/addons/mrp/models/mrp_unbuild.py
new file mode 100644
index 00000000..60aeea67
--- /dev/null
+++ b/addons/mrp/models/mrp_unbuild.py
@@ -0,0 +1,300 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models, _
+from odoo.exceptions import AccessError, UserError
+from odoo.tools import float_compare
+from odoo.osv import expression
+
+
+class MrpUnbuild(models.Model):
+ _name = "mrp.unbuild"
+ _description = "Unbuild Order"
+ _inherit = ['mail.thread', 'mail.activity.mixin']
+ _order = 'id desc'
+
+ name = fields.Char('Reference', copy=False, readonly=True, default=lambda x: _('New'))
+ product_id = fields.Many2one(
+ 'product.product', 'Product', check_company=True,
+ domain="[('type', 'in', ['product', 'consu']), '|', ('company_id', '=', False), ('company_id', '=', company_id)]",
+ required=True, states={'done': [('readonly', True)]})
+ company_id = fields.Many2one(
+ 'res.company', 'Company',
+ default=lambda s: s.env.company,
+ required=True, index=True, states={'done': [('readonly', True)]})
+ product_qty = fields.Float(
+ 'Quantity', default=1.0,
+ required=True, states={'done': [('readonly', True)]})
+ product_uom_id = fields.Many2one(
+ 'uom.uom', 'Unit of Measure',
+ required=True, states={'done': [('readonly', True)]})
+ bom_id = fields.Many2one(
+ 'mrp.bom', 'Bill of Material',
+ domain="""[
+ '|',
+ ('product_id', '=', product_id),
+ '&',
+ ('product_tmpl_id.product_variant_ids', '=', product_id),
+ ('product_id','=',False),
+ ('type', '=', 'normal'),
+ '|',
+ ('company_id', '=', company_id),
+ ('company_id', '=', False)
+ ]
+""",
+ states={'done': [('readonly', True)]}, check_company=True)
+ mo_id = fields.Many2one(
+ 'mrp.production', 'Manufacturing Order',
+ domain="[('id', 'in', allowed_mo_ids)]",
+ states={'done': [('readonly', True)]}, check_company=True)
+ mo_bom_id = fields.Many2one('mrp.bom', 'Bill of Material used on the Production Order', related='mo_id.bom_id')
+ lot_id = fields.Many2one(
+ 'stock.production.lot', 'Lot/Serial Number',
+ domain="[('product_id', '=', product_id), ('company_id', '=', company_id)]", check_company=True,
+ states={'done': [('readonly', True)]}, help="Lot/Serial Number of the product to unbuild.")
+ has_tracking=fields.Selection(related='product_id.tracking', readonly=True)
+ location_id = fields.Many2one(
+ 'stock.location', 'Source Location',
+ domain="[('usage','=','internal'), '|', ('company_id', '=', False), ('company_id', '=', company_id)]",
+ check_company=True,
+ required=True, states={'done': [('readonly', True)]}, help="Location where the product you want to unbuild is.")
+ location_dest_id = fields.Many2one(
+ 'stock.location', 'Destination Location',
+ domain="[('usage','=','internal'), '|', ('company_id', '=', False), ('company_id', '=', company_id)]",
+ check_company=True,
+ required=True, states={'done': [('readonly', True)]}, help="Location where you want to send the components resulting from the unbuild order.")
+ consume_line_ids = fields.One2many(
+ 'stock.move', 'consume_unbuild_id', readonly=True,
+ string='Consumed Disassembly Lines')
+ produce_line_ids = fields.One2many(
+ 'stock.move', 'unbuild_id', readonly=True,
+ string='Processed Disassembly Lines')
+ state = fields.Selection([
+ ('draft', 'Draft'),
+ ('done', 'Done')], string='Status', default='draft', index=True)
+ allowed_mo_ids = fields.One2many('mrp.production', compute='_compute_allowed_mo_ids')
+
+ @api.depends('company_id', 'product_id', 'bom_id')
+ def _compute_allowed_mo_ids(self):
+ for unbuild in self:
+ domain = [
+ ('state', '=', 'done'),
+ ('company_id', '=', unbuild.company_id.id)
+ ]
+ if unbuild.bom_id:
+ domain = expression.AND([domain, [('bom_id', '=', unbuild.bom_id.id)]])
+ elif unbuild.product_id:
+ domain = expression.AND([domain, [('product_id', '=', unbuild.product_id.id)]])
+ allowed_mos = self.env['mrp.production'].search_read(domain, ['id'])
+ if allowed_mos:
+ unbuild.allowed_mo_ids = [mo['id'] for mo in allowed_mos]
+ else:
+ unbuild.allowed_mo_ids = False
+
+ @api.onchange('company_id')
+ def _onchange_company_id(self):
+ if self.company_id:
+ warehouse = self.env['stock.warehouse'].search([('company_id', '=', self.company_id.id)], limit=1)
+ if self.location_id.company_id != self.company_id:
+ self.location_id = warehouse.lot_stock_id
+ if self.location_dest_id.company_id != self.company_id:
+ self.location_dest_id = warehouse.lot_stock_id
+ else:
+ self.location_id = False
+ self.location_dest_id = False
+
+ @api.onchange('mo_id')
+ def _onchange_mo_id(self):
+ if self.mo_id:
+ self.product_id = self.mo_id.product_id.id
+ self.bom_id = self.mo_id.bom_id
+ self.product_uom_id = self.mo_id.product_uom_id
+ if self.has_tracking == 'serial':
+ self.product_qty = 1
+ else:
+ self.product_qty = self.mo_id.product_qty
+
+ @api.onchange('product_id')
+ def _onchange_product_id(self):
+ if self.product_id:
+ self.bom_id = self.env['mrp.bom']._bom_find(product=self.product_id, company_id=self.company_id.id)
+ self.product_uom_id = self.mo_id.product_id == self.product_id and self.mo_id.product_uom_id.id or self.product_id.uom_id.id
+
+ @api.constrains('product_qty')
+ def _check_qty(self):
+ if self.product_qty <= 0:
+ raise ValueError(_('Unbuild Order product quantity has to be strictly positive.'))
+
+ @api.model
+ def create(self, vals):
+ if not vals.get('name') or vals['name'] == _('New'):
+ vals['name'] = self.env['ir.sequence'].next_by_code('mrp.unbuild') or _('New')
+ return super(MrpUnbuild, self).create(vals)
+
+ def unlink(self):
+ if 'done' in self.mapped('state'):
+ raise UserError(_("You cannot delete an unbuild order if the state is 'Done'."))
+ return super(MrpUnbuild, self).unlink()
+
+ def action_unbuild(self):
+ self.ensure_one()
+ self._check_company()
+ if self.product_id.tracking != 'none' and not self.lot_id.id:
+ raise UserError(_('You should provide a lot number for the final product.'))
+
+ if self.mo_id:
+ if self.mo_id.state != 'done':
+ raise UserError(_('You cannot unbuild a undone manufacturing order.'))
+
+ consume_moves = self._generate_consume_moves()
+ consume_moves._action_confirm()
+ produce_moves = self._generate_produce_moves()
+ produce_moves._action_confirm()
+
+ finished_moves = consume_moves.filtered(lambda m: m.product_id == self.product_id)
+ consume_moves -= finished_moves
+
+ if any(produce_move.has_tracking != 'none' and not self.mo_id for produce_move in produce_moves):
+ raise UserError(_('Some of your components are tracked, you have to specify a manufacturing order in order to retrieve the correct components.'))
+
+ if any(consume_move.has_tracking != 'none' and not self.mo_id for consume_move in consume_moves):
+ raise UserError(_('Some of your byproducts are tracked, you have to specify a manufacturing order in order to retrieve the correct byproducts.'))
+
+ for finished_move in finished_moves:
+ if finished_move.has_tracking != 'none':
+ self.env['stock.move.line'].create({
+ 'move_id': finished_move.id,
+ 'lot_id': self.lot_id.id,
+ 'qty_done': finished_move.product_uom_qty,
+ 'product_id': finished_move.product_id.id,
+ 'product_uom_id': finished_move.product_uom.id,
+ 'location_id': finished_move.location_id.id,
+ 'location_dest_id': finished_move.location_dest_id.id,
+ })
+ else:
+ finished_move.quantity_done = finished_move.product_uom_qty
+
+ # TODO: Will fail if user do more than one unbuild with lot on the same MO. Need to check what other unbuild has aready took
+ for move in produce_moves | consume_moves:
+ if move.has_tracking != 'none':
+ original_move = move in produce_moves and self.mo_id.move_raw_ids or self.mo_id.move_finished_ids
+ original_move = original_move.filtered(lambda m: m.product_id == move.product_id)
+ needed_quantity = move.product_uom_qty
+ moves_lines = original_move.mapped('move_line_ids')
+ if move in produce_moves and self.lot_id:
+ moves_lines = moves_lines.filtered(lambda ml: self.lot_id in ml.produce_line_ids.lot_id) # FIXME sle: double check with arm
+ for move_line in moves_lines:
+ # Iterate over all move_lines until we unbuilded the correct quantity.
+ taken_quantity = min(needed_quantity, move_line.qty_done)
+ if taken_quantity:
+ self.env['stock.move.line'].create({
+ 'move_id': move.id,
+ 'lot_id': move_line.lot_id.id,
+ 'qty_done': taken_quantity,
+ 'product_id': move.product_id.id,
+ 'product_uom_id': move_line.product_uom_id.id,
+ 'location_id': move.location_id.id,
+ 'location_dest_id': move.location_dest_id.id,
+ })
+ needed_quantity -= taken_quantity
+ else:
+ move.quantity_done = move.product_uom_qty
+
+ finished_moves._action_done()
+ consume_moves._action_done()
+ produce_moves._action_done()
+ produced_move_line_ids = produce_moves.mapped('move_line_ids').filtered(lambda ml: ml.qty_done > 0)
+ consume_moves.mapped('move_line_ids').write({'produce_line_ids': [(6, 0, produced_move_line_ids.ids)]})
+
+ return self.write({'state': 'done'})
+
+ def _generate_consume_moves(self):
+ moves = self.env['stock.move']
+ for unbuild in self:
+ if unbuild.mo_id:
+ finished_moves = unbuild.mo_id.move_finished_ids.filtered(lambda move: move.state == 'done')
+ factor = unbuild.product_qty / unbuild.mo_id.product_uom_id._compute_quantity(unbuild.mo_id.product_qty, unbuild.product_uom_id)
+ for finished_move in finished_moves:
+ moves += unbuild._generate_move_from_existing_move(finished_move, factor, finished_move.location_dest_id, finished_move.location_id)
+ else:
+ factor = unbuild.product_uom_id._compute_quantity(unbuild.product_qty, unbuild.bom_id.product_uom_id) / unbuild.bom_id.product_qty
+ moves += unbuild._generate_move_from_bom_line(self.product_id, self.product_uom_id, unbuild.product_qty)
+ for byproduct in unbuild.bom_id.byproduct_ids:
+ quantity = byproduct.product_qty * factor
+ moves += unbuild._generate_move_from_bom_line(byproduct.product_id, byproduct.product_uom_id, quantity, byproduct_id=byproduct.id)
+ return moves
+
+ def _generate_produce_moves(self):
+ moves = self.env['stock.move']
+ for unbuild in self:
+ if unbuild.mo_id:
+ raw_moves = unbuild.mo_id.move_raw_ids.filtered(lambda move: move.state == 'done')
+ factor = unbuild.product_qty / unbuild.mo_id.product_uom_id._compute_quantity(unbuild.mo_id.product_qty, unbuild.product_uom_id)
+ for raw_move in raw_moves:
+ moves += unbuild._generate_move_from_existing_move(raw_move, factor, raw_move.location_dest_id, self.location_dest_id)
+ else:
+ factor = unbuild.product_uom_id._compute_quantity(unbuild.product_qty, unbuild.bom_id.product_uom_id) / unbuild.bom_id.product_qty
+ boms, lines = unbuild.bom_id.explode(unbuild.product_id, factor, picking_type=unbuild.bom_id.picking_type_id)
+ for line, line_data in lines:
+ moves += unbuild._generate_move_from_bom_line(line.product_id, line.product_uom_id, line_data['qty'], bom_line_id=line.id)
+ return moves
+
+ def _generate_move_from_existing_move(self, move, factor, location_id, location_dest_id):
+ return self.env['stock.move'].create({
+ 'name': self.name,
+ 'date': self.create_date,
+ 'product_id': move.product_id.id,
+ 'product_uom_qty': move.product_uom_qty * factor,
+ 'product_uom': move.product_uom.id,
+ 'procure_method': 'make_to_stock',
+ 'location_dest_id': location_dest_id.id,
+ 'location_id': location_id.id,
+ 'warehouse_id': location_dest_id.get_warehouse().id,
+ 'unbuild_id': self.id,
+ 'company_id': move.company_id.id,
+ })
+
+ def _generate_move_from_bom_line(self, product, product_uom, quantity, bom_line_id=False, byproduct_id=False):
+ product_prod_location = product.with_company(self.company_id).property_stock_production
+ location_id = bom_line_id and product_prod_location or self.location_id
+ location_dest_id = bom_line_id and self.location_dest_id or product_prod_location
+ warehouse = location_dest_id.get_warehouse()
+ return self.env['stock.move'].create({
+ 'name': self.name,
+ 'date': self.create_date,
+ 'bom_line_id': bom_line_id,
+ 'byproduct_id': byproduct_id,
+ 'product_id': product.id,
+ 'product_uom_qty': quantity,
+ 'product_uom': product_uom.id,
+ 'procure_method': 'make_to_stock',
+ 'location_dest_id': location_dest_id.id,
+ 'location_id': location_id.id,
+ 'warehouse_id': warehouse.id,
+ 'unbuild_id': self.id,
+ 'company_id': self.company_id.id,
+ })
+
+ def action_validate(self):
+ self.ensure_one()
+ precision = self.env['decimal.precision'].precision_get('Product Unit of Measure')
+ available_qty = self.env['stock.quant']._get_available_quantity(self.product_id, self.location_id, self.lot_id, strict=True)
+ unbuild_qty = self.product_uom_id._compute_quantity(self.product_qty, self.product_id.uom_id)
+ if float_compare(available_qty, unbuild_qty, precision_digits=precision) >= 0:
+ return self.action_unbuild()
+ else:
+ return {
+ 'name': self.product_id.display_name + _(': Insufficient Quantity To Unbuild'),
+ 'view_mode': 'form',
+ 'res_model': 'stock.warn.insufficient.qty.unbuild',
+ 'view_id': self.env.ref('mrp.stock_warn_insufficient_qty_unbuild_form_view').id,
+ 'type': 'ir.actions.act_window',
+ 'context': {
+ 'default_product_id': self.product_id.id,
+ 'default_location_id': self.location_id.id,
+ 'default_unbuild_id': self.id,
+ 'default_quantity': unbuild_qty,
+ 'default_product_uom_name': self.product_id.uom_name
+ },
+ 'target': 'new'
+ }
diff --git a/addons/mrp/models/mrp_workcenter.py b/addons/mrp/models/mrp_workcenter.py
new file mode 100644
index 00000000..190581ee
--- /dev/null
+++ b/addons/mrp/models/mrp_workcenter.py
@@ -0,0 +1,346 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from dateutil import relativedelta
+from datetime import timedelta
+from functools import partial
+import datetime
+from pytz import timezone
+
+from odoo import api, exceptions, fields, models, _
+from odoo.exceptions import ValidationError
+from odoo.addons.resource.models.resource import make_aware, Intervals
+from odoo.tools.float_utils import float_compare
+
+
class MrpWorkcenter(models.Model):
    """A work center: a physical production resource on which workorders run.

    Inherits ``resource.mixin`` so each work center is backed by a
    ``resource.resource`` record (name, calendar, efficiency, ...).
    """
    _name = 'mrp.workcenter'
    _description = 'Work Center'
    _order = "sequence, id"
    _inherit = ['resource.mixin']
    _check_company_auto = True

    # Fields delegated to the underlying resource.resource record.
    name = fields.Char('Work Center', related='resource_id.name', store=True, readonly=False)
    time_efficiency = fields.Float('Time Efficiency', related='resource_id.time_efficiency', default=100, store=True, readonly=False)
    active = fields.Boolean('Active', related='resource_id.active', default=True, store=True, readonly=False)

    code = fields.Char('Code', copy=False)
    note = fields.Text(
        'Description',
        help="Description of the Work Center.")
    capacity = fields.Float(
        'Capacity', default=1.0,
        help="Number of pieces that can be produced in parallel. In case the work center has a capacity of 5 and you have to produce 10 units on your work order, the usual operation time will be multiplied by 2.")
    sequence = fields.Integer(
        'Sequence', default=1, required=True,
        help="Gives the sequence order when displaying a list of work centers.")
    color = fields.Integer('Color')
    costs_hour = fields.Float(string='Cost per hour', help='Specify cost of work center per hour.', default=0.0)
    time_start = fields.Float('Time before prod.', help="Time in minutes for the setup.")
    time_stop = fields.Float('Time after prod.', help="Time in minutes for the cleaning.")
    routing_line_ids = fields.One2many('mrp.routing.workcenter', 'workcenter_id', "Routing Lines")
    order_ids = fields.One2many('mrp.workorder', 'workcenter_id', "Orders")
    # Dashboard counters, all computed in one pass by _compute_workorder_count.
    workorder_count = fields.Integer('# Work Orders', compute='_compute_workorder_count')
    workorder_ready_count = fields.Integer('# Read Work Orders', compute='_compute_workorder_count')
    workorder_progress_count = fields.Integer('Total Running Orders', compute='_compute_workorder_count')
    workorder_pending_count = fields.Integer('Total Pending Orders', compute='_compute_workorder_count')
    workorder_late_count = fields.Integer('Total Late Orders', compute='_compute_workorder_count')

    time_ids = fields.One2many('mrp.workcenter.productivity', 'workcenter_id', 'Time Logs')
    working_state = fields.Selection([
        ('normal', 'Normal'),
        ('blocked', 'Blocked'),
        ('done', 'In Progress')], 'Workcenter Status', compute="_compute_working_state", store=True)
    blocked_time = fields.Float(
        'Blocked Time', compute='_compute_blocked_time',
        help='Blocked hours over the last month', digits=(16, 2))
    productive_time = fields.Float(
        'Productive Time', compute='_compute_productive_time',
        help='Productive hours over the last month', digits=(16, 2))
    oee = fields.Float(compute='_compute_oee', help='Overall Equipment Effectiveness, based on the last month')
    oee_target = fields.Float(string='OEE Target', help="Overall Effective Efficiency Target in percentage", default=90)
    performance = fields.Integer('Performance', compute='_compute_performance', help='Performance over the last month')
    workcenter_load = fields.Float('Work Center Load', compute='_compute_workorder_count')
    alternative_workcenter_ids = fields.Many2many(
        'mrp.workcenter',
        'mrp_workcenter_alternative_rel',
        'workcenter_id',
        'alternative_workcenter_id',
        domain="[('id', '!=', id), '|', ('company_id', '=', company_id), ('company_id', '=', False)]",
        string="Alternative Workcenters", check_company=True,
        help="Alternative workcenters that can be substituted to this one in order to dispatch production"
    )

    @api.constrains('alternative_workcenter_ids')
    def _check_alternative_workcenter(self):
        """Forbid a work center from listing itself as its own alternative."""
        if self in self.alternative_workcenter_ids:
            raise ValidationError(_("A workcenter cannot be an alternative of itself"))

    @api.depends('order_ids.duration_expected', 'order_ids.workcenter_id', 'order_ids.state', 'order_ids.date_planned_start')
    def _compute_workorder_count(self):
        """Compute all the workorder KPI counters with two read_group queries."""
        MrpWorkorder = self.env['mrp.workorder']
        result = {wid: {} for wid in self._ids}
        result_duration_expected = {wid: 0 for wid in self._ids}
        # Count late workorders (pending/ready and planned start before today).
        data = MrpWorkorder.read_group([('workcenter_id', 'in', self.ids), ('state', 'in', ('pending', 'ready')), ('date_planned_start', '<', datetime.datetime.now().strftime('%Y-%m-%d'))], ['workcenter_id'], ['workcenter_id'])
        count_data = dict((item['workcenter_id'][0], item['workcenter_id_count']) for item in data)
        # Count all / pending / ready / progress workorders per state.
        res = MrpWorkorder.read_group(
            [('workcenter_id', 'in', self.ids)],
            ['workcenter_id', 'state', 'duration_expected'], ['workcenter_id', 'state'],
            lazy=False)
        for res_group in res:
            result[res_group['workcenter_id'][0]][res_group['state']] = res_group['__count']
            if res_group['state'] in ('pending', 'ready', 'progress'):
                result_duration_expected[res_group['workcenter_id'][0]] += res_group['duration_expected']
        for workcenter in self:
            workcenter.workorder_count = sum(count for state, count in result[workcenter.id].items() if state not in ('done', 'cancel'))
            workcenter.workorder_pending_count = result[workcenter.id].get('pending', 0)
            # load = total expected duration (minutes) of not-yet-finished workorders
            workcenter.workcenter_load = result_duration_expected[workcenter.id]
            workcenter.workorder_ready_count = result[workcenter.id].get('ready', 0)
            workcenter.workorder_progress_count = result[workcenter.id].get('progress', 0)
            workcenter.workorder_late_count = count_data.get(workcenter.id, 0)

    @api.depends('time_ids', 'time_ids.date_end', 'time_ids.loss_type')
    def _compute_working_state(self):
        for workcenter in self:
            # We search for a productivity line associated to this workcenter having no `date_end`.
            # If we do not find one, the workcenter is not currently being used. If we find one, according
            # to its `loss_type`, the workcenter is either being used or blocked.
            time_log = self.env['mrp.workcenter.productivity'].search([
                ('workcenter_id', '=', workcenter.id),
                ('date_end', '=', False)
            ], limit=1)
            if not time_log:
                # the workcenter is not being used
                workcenter.working_state = 'normal'
            elif time_log.loss_type in ('productive', 'performance'):
                # the productivity line has a `loss_type` that means the workcenter is being used
                workcenter.working_state = 'done'
            else:
                # the workcenter is blocked
                workcenter.working_state = 'blocked'

    def _compute_blocked_time(self):
        """Sum closed, non-productive time logs of the last month, in hours."""
        # TDE FIXME: productivity loss type should be only losses, probably count other time logs differently ??
        data = self.env['mrp.workcenter.productivity'].read_group([
            ('date_start', '>=', fields.Datetime.to_string(datetime.datetime.now() - relativedelta.relativedelta(months=1))),
            ('workcenter_id', 'in', self.ids),
            ('date_end', '!=', False),
            ('loss_type', '!=', 'productive')],
            ['duration', 'workcenter_id'], ['workcenter_id'], lazy=False)
        count_data = dict((item['workcenter_id'][0], item['duration']) for item in data)
        for workcenter in self:
            # durations are stored in minutes; expose hours
            workcenter.blocked_time = count_data.get(workcenter.id, 0.0) / 60.0

    def _compute_productive_time(self):
        """Sum closed, productive time logs of the last month, in hours."""
        # TDE FIXME: productivity loss type should be only losses, probably count other time logs differently
        data = self.env['mrp.workcenter.productivity'].read_group([
            ('date_start', '>=', fields.Datetime.to_string(datetime.datetime.now() - relativedelta.relativedelta(months=1))),
            ('workcenter_id', 'in', self.ids),
            ('date_end', '!=', False),
            ('loss_type', '=', 'productive')],
            ['duration', 'workcenter_id'], ['workcenter_id'], lazy=False)
        count_data = dict((item['workcenter_id'][0], item['duration']) for item in data)
        for workcenter in self:
            workcenter.productive_time = count_data.get(workcenter.id, 0.0) / 60.0

    @api.depends('blocked_time', 'productive_time')
    def _compute_oee(self):
        """OEE = productive time over total (productive + blocked) time, in %."""
        for order in self:
            if order.productive_time:
                order.oee = round(order.productive_time * 100.0 / (order.productive_time + order.blocked_time), 2)
            else:
                order.oee = 0.0

    def _compute_performance(self):
        """Performance = expected / real duration of last month's done workorders, in %."""
        wo_data = self.env['mrp.workorder'].read_group([
            ('date_start', '>=', fields.Datetime.to_string(datetime.datetime.now() - relativedelta.relativedelta(months=1))),
            ('workcenter_id', 'in', self.ids),
            ('state', '=', 'done')], ['duration_expected', 'workcenter_id', 'duration'], ['workcenter_id'], lazy=False)
        duration_expected = dict((data['workcenter_id'][0], data['duration_expected']) for data in wo_data)
        duration = dict((data['workcenter_id'][0], data['duration']) for data in wo_data)
        for workcenter in self:
            if duration.get(workcenter.id):
                workcenter.performance = 100 * duration_expected.get(workcenter.id, 0.0) / duration[workcenter.id]
            else:
                workcenter.performance = 0.0

    @api.constrains('capacity')
    def _check_capacity(self):
        if any(workcenter.capacity <= 0.0 for workcenter in self):
            raise exceptions.UserError(_('The capacity must be strictly positive.'))

    def unblock(self):
        """Close all open time logs on this (blocked) workcenter and reload the view."""
        self.ensure_one()
        if self.working_state != 'blocked':
            raise exceptions.UserError(_("It has already been unblocked."))
        times = self.env['mrp.workcenter.productivity'].search([('workcenter_id', '=', self.id), ('date_end', '=', False)])
        times.write({'date_end': fields.Datetime.now()})
        return {'type': 'ir.actions.client', 'tag': 'reload'}

    @api.model_create_multi
    def create(self, vals_list):
        # resource_type defaults to 'human' on resource.resource; work centers
        # are machines, so force 'material' unless the caller overrides it.
        records = super(MrpWorkcenter, self.with_context(default_resource_type='material')).create(vals_list)
        return records

    def write(self, vals):
        # keep the backing resource in the same company as the workcenter
        if 'company_id' in vals:
            self.resource_id.company_id = vals['company_id']
        return super(MrpWorkcenter, self).write(vals)

    def action_work_order(self):
        """Open the work orders action (used by the dashboard buttons)."""
        action = self.env["ir.actions.actions"]._for_xml_id("mrp.action_work_orders")
        return action

    def _get_unavailability_intervals(self, start_datetime, end_datetime):
        """Get the unavailabilities intervals for the workcenters in `self`.

        Return the list of unavailabilities (a tuple of datetimes) indexed
        by workcenter id.

        :param start_datetime: filter unavailability with only slots after this start_datetime
        :param end_datetime: filter unavailability with only slots before this end_datetime
        :rtype: dict
        """
        unavailability_ressources = self.resource_id._get_unavailable_intervals(start_datetime, end_datetime)
        return {wc.id: unavailability_ressources.get(wc.resource_id.id, []) for wc in self}

    def _get_first_available_slot(self, start_datetime, duration):
        """Get the first available interval for the workcenter in `self`.

        The available interval is disjoint from all other workorders planned
        on this workcenter, but can overlap the time-off of the related
        calendar (inverse of the working hours).
        Return the first available interval (start datetime, end datetime) or,
        if there is none before 700 days, a tuple error (False, 'error message').

        :param start_datetime: begin the search at this datetime
        :param duration: minutes needed to make the workorder (float)
        :rtype: tuple
        """
        self.ensure_one()
        start_datetime, revert = make_aware(start_datetime)

        get_available_intervals = partial(self.resource_calendar_id._work_intervals, domain=[('time_type', 'in', ['other', 'leave'])], resource=self.resource_id, tz=timezone(self.resource_calendar_id.tz))
        get_workorder_intervals = partial(self.resource_calendar_id._leave_intervals, domain=[('time_type', '=', 'other')], resource=self.resource_id, tz=timezone(self.resource_calendar_id.tz))

        remaining = duration
        start_interval = start_datetime
        delta = timedelta(days=14)

        for n in range(50):  # 50 * 14 = 700 days in advance (hardcoded)
            dt = start_datetime + delta * n
            available_intervals = get_available_intervals(dt, dt + delta)
            workorder_intervals = get_workorder_intervals(dt, dt + delta)
            for start, stop, dummy in available_intervals:
                interval_minutes = (stop - start).total_seconds() / 60
                # If no minutes have been consumed yet, restart the candidate slot here.
                if remaining == duration:
                    start_interval = start
                # If the candidate slot overlaps another planned workorder, reset and retry.
                if Intervals([(start_interval, start + timedelta(minutes=min(remaining, interval_minutes)), dummy)]) & workorder_intervals:
                    remaining = duration
                    start_interval = start
                elif float_compare(interval_minutes, remaining, precision_digits=3) >= 0:
                    # This interval finishes the required duration: return the slot.
                    return revert(start_interval), revert(start + timedelta(minutes=remaining))
                # Consume part of the remaining duration with this interval.
                remaining -= interval_minutes
        return False, 'Not available slot 700 days after the planned start'
+
+
class MrpWorkcenterProductivityLossType(models.Model):
    """OEE effectiveness category used to classify productivity losses."""
    _name = "mrp.workcenter.productivity.loss.type"
    _description = 'MRP Workorder productivity losses'
    _rec_name = 'loss_type'

    @api.depends('loss_type')
    def name_get(self):
        """ As 'category' field in form view is a Many2one, its value will be in
        lower case. In order to display its value capitalized 'name_get' is
        overridden.
        """
        result = []
        for rec in self:
            result.append((rec.id, rec.loss_type.title()))
        return result

    loss_type = fields.Selection([
        ('availability', 'Availability'),
        ('performance', 'Performance'),
        ('quality', 'Quality'),
        ('productive', 'Productive')], string='Category', default='availability', required=True)
+
+
class MrpWorkcenterProductivityLoss(models.Model):
    """A named reason for productivity loss (e.g. a blocking reason),
    linked to an OEE effectiveness category."""
    _name = "mrp.workcenter.productivity.loss"
    _description = "Workcenter Productivity Losses"
    _order = "sequence, id"

    name = fields.Char('Blocking Reason', required=True)
    sequence = fields.Integer('Sequence', default=1)
    manual = fields.Boolean('Is a Blocking Reason', default=True)
    # only quality/availability categories can be picked as blocking reasons
    loss_id = fields.Many2one('mrp.workcenter.productivity.loss.type', domain=([('loss_type', 'in', ['quality', 'availability'])]), string='Category')
    loss_type = fields.Selection(string='Effectiveness Category', related='loss_id.loss_type', store=True, readonly=False)
+
+
class MrpWorkcenterProductivity(models.Model):
    """Time log on a workcenter (optionally on a workorder).

    A log with no ``date_end`` is still open; its ``loss_type`` determines
    whether the workcenter is shown as running or blocked.
    """
    _name = "mrp.workcenter.productivity"
    _description = "Workcenter Productivity Log"
    _order = "id desc"
    _rec_name = "loss_id"
    _check_company_auto = True

    def _get_default_company_id(self):
        """Resolve the default company for a new log.

        Priority: explicit ``default_company_id`` in context, then the company
        of the contextual workorder, then of the contextual workcenter, and
        finally the current user's company.
        """
        company_id = False
        if self.env.context.get('default_company_id'):
            company_id = self.env.context['default_company_id']
        if not company_id and self.env.context.get('default_workorder_id'):
            workorder = self.env['mrp.workorder'].browse(self.env.context['default_workorder_id'])
            company_id = workorder.company_id
        if not company_id and self.env.context.get('default_workcenter_id'):
            workcenter = self.env['mrp.workcenter'].browse(self.env.context['default_workcenter_id'])
            company_id = workcenter.company_id
        if not company_id:
            company_id = self.env.company
        return company_id

    # BUGFIX: readonly must be a boolean, not the string 'True' (the string is
    # truthy so behavior was accidental); now consistent with the other fields.
    production_id = fields.Many2one('mrp.production', string='Manufacturing Order', related='workorder_id.production_id', readonly=True)
    workcenter_id = fields.Many2one('mrp.workcenter', "Work Center", required=True, check_company=True)
    company_id = fields.Many2one(
        'res.company', required=True, index=True,
        default=lambda self: self._get_default_company_id())
    workorder_id = fields.Many2one('mrp.workorder', 'Work Order', check_company=True)
    user_id = fields.Many2one(
        'res.users', "User",
        default=lambda self: self.env.uid)
    loss_id = fields.Many2one(
        'mrp.workcenter.productivity.loss', "Loss Reason",
        ondelete='restrict', required=True)
    loss_type = fields.Selection(
        string="Effectiveness", related='loss_id.loss_type', store=True, readonly=False)
    description = fields.Text('Description')
    date_start = fields.Datetime('Start Date', default=fields.Datetime.now, required=True)
    date_end = fields.Datetime('End Date')
    duration = fields.Float('Duration', compute='_compute_duration', store=True)

    @api.depends('date_end', 'date_start')
    def _compute_duration(self):
        """Duration of the log in minutes.

        For downtime logs (loss_type not productive/performance) on a
        workcenter with a calendar, only working-calendar hours are counted;
        otherwise plain wall-clock time is used. Open logs have duration 0.
        """
        for blocktime in self:
            if blocktime.date_start and blocktime.date_end:
                d1 = fields.Datetime.from_string(blocktime.date_start)
                d2 = fields.Datetime.from_string(blocktime.date_end)
                diff = d2 - d1
                if (blocktime.loss_type not in ('productive', 'performance')) and blocktime.workcenter_id.resource_calendar_id:
                    r = blocktime.workcenter_id._get_work_days_data_batch(d1, d2)[blocktime.workcenter_id.id]['hours']
                    blocktime.duration = round(r * 60, 2)
                else:
                    blocktime.duration = round(diff.total_seconds() / 60.0, 2)
            else:
                blocktime.duration = 0.0

    def button_block(self):
        """Stop all running workorders of the workcenter when it gets blocked."""
        self.ensure_one()
        self.workcenter_id.order_ids.end_all()
diff --git a/addons/mrp/models/mrp_workorder.py b/addons/mrp/models/mrp_workorder.py
new file mode 100644
index 00000000..f1167395
--- /dev/null
+++ b/addons/mrp/models/mrp_workorder.py
@@ -0,0 +1,816 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from datetime import datetime, timedelta
+from dateutil.relativedelta import relativedelta
+from collections import defaultdict
+import json
+
+from odoo import api, fields, models, _, SUPERUSER_ID
+from odoo.exceptions import UserError
+from odoo.tools import float_compare, float_round, format_datetime
+
+
+class MrpWorkorder(models.Model):
+ _name = 'mrp.workorder'
+ _description = 'Work Order'
+
    def _read_group_workcenter_id(self, workcenters, domain, order):
        """group_expand hook for the kanban/gantt grouped by workcenter.

        Show the contextual default workcenter only when one is set;
        otherwise expand to all workcenters (searched as superuser so empty
        columns are still displayed).
        """
        workcenter_ids = self.env.context.get('default_workcenter_id')
        if not workcenter_ids:
            workcenter_ids = workcenters._search([], order=order, access_rights_uid=SUPERUSER_ID)
        return workcenters.browse(workcenter_ids)
+
    name = fields.Char(
        'Work Order', required=True,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    workcenter_id = fields.Many2one(
        'mrp.workcenter', 'Work Center', required=True,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)], 'progress': [('readonly', True)]},
        group_expand='_read_group_workcenter_id', check_company=True)
    working_state = fields.Selection(
        string='Workcenter Status', related='workcenter_id.working_state', readonly=False,
        help='Technical: used in views only')
    product_id = fields.Many2one(related='production_id.product_id', readonly=True, store=True, check_company=True)
    product_tracking = fields.Selection(related="product_id.tracking")
    product_uom_id = fields.Many2one('uom.uom', 'Unit of Measure', required=True, readonly=True)
    use_create_components_lots = fields.Boolean(related="production_id.picking_type_id.use_create_components_lots")
    production_id = fields.Many2one('mrp.production', 'Manufacturing Order', required=True, check_company=True, readonly=True)
    production_availability = fields.Selection(
        string='Stock Availability', readonly=True,
        related='production_id.reservation_state', store=True,
        help='Technical: used in views and domains only.')
    production_state = fields.Selection(
        string='Production State', readonly=True,
        related='production_id.state',
        help='Technical: used in views only.')
    production_bom_id = fields.Many2one('mrp.bom', related='production_id.bom_id')
    qty_production = fields.Float('Original Production Quantity', readonly=True, related='production_id.product_qty')
    company_id = fields.Many2one(related='production_id.company_id')
    # quantity fields: qty_producing mirrors the MO's qty_producing (see
    # _compute_qty_producing / _set_qty_producing below)
    qty_producing = fields.Float(
        compute='_compute_qty_producing', inverse='_set_qty_producing',
        string='Currently Produced Quantity', digits='Product Unit of Measure')
    qty_remaining = fields.Float('Quantity To Be Produced', compute='_compute_qty_remaining', digits='Product Unit of Measure')
    qty_produced = fields.Float(
        'Quantity', default=0.0,
        readonly=True,
        digits='Product Unit of Measure',
        copy=False,
        help="The number of products already handled by this work order")
    is_produced = fields.Boolean(string="Has Been Produced",
        compute='_compute_is_produced')
    state = fields.Selection([
        ('pending', 'Waiting for another WO'),
        ('ready', 'Ready'),
        ('progress', 'In Progress'),
        ('done', 'Finished'),
        ('cancel', 'Cancelled')], string='Status',
        default='pending', copy=False, readonly=True)
    # planning: the planned dates live on the linked calendar leave record
    leave_id = fields.Many2one(
        'resource.calendar.leaves',
        help='Slot into workcenter calendar once planned',
        check_company=True, copy=False)
    date_planned_start = fields.Datetime(
        'Scheduled Start Date',
        compute='_compute_dates_planned',
        inverse='_set_dates_planned',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]},
        store=True, copy=False)
    date_planned_finished = fields.Datetime(
        'Scheduled End Date',
        compute='_compute_dates_planned',
        inverse='_set_dates_planned',
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]},
        store=True, copy=False)
    date_start = fields.Datetime(
        'Start Date', copy=False,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})
    date_finished = fields.Datetime(
        'End Date', copy=False,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]})

    # durations are expressed in minutes
    duration_expected = fields.Float(
        'Expected Duration', digits=(16, 2), default=60.0,
        states={'done': [('readonly', True)], 'cancel': [('readonly', True)]},
        help="Expected duration (in minutes)")
    duration = fields.Float(
        'Real Duration', compute='_compute_duration', inverse='_set_duration',
        readonly=False, store=True, copy=False)
    duration_unit = fields.Float(
        'Duration Per Unit', compute='_compute_duration',
        group_operator="avg", readonly=True, store=True)
    duration_percent = fields.Integer(
        'Duration Deviation (%)', compute='_compute_duration',
        group_operator="avg", readonly=True, store=True)
    progress = fields.Float('Progress Done (%)', digits=(16, 2), compute='_compute_progress')

    operation_id = fields.Many2one(
        'mrp.routing.workcenter', 'Operation', check_company=True)
    # Should be used differently as BoM can change in the meantime
    worksheet = fields.Binary(
        'Worksheet', related='operation_id.worksheet', readonly=True)
    worksheet_type = fields.Selection(
        string='Worksheet Type', related='operation_id.worksheet_type', readonly=True)
    worksheet_google_slide = fields.Char(
        'Worksheet URL', related='operation_id.worksheet_google_slide', readonly=True)
    operation_note = fields.Text("Description", related='operation_id.note', readonly=True)
    move_raw_ids = fields.One2many(
        'stock.move', 'workorder_id', 'Raw Moves',
        domain=[('raw_material_production_id', '!=', False), ('production_id', '=', False)])
    move_finished_ids = fields.One2many(
        'stock.move', 'workorder_id', 'Finished Moves',
        domain=[('raw_material_production_id', '=', False), ('production_id', '!=', False)])
    move_line_ids = fields.One2many(
        'stock.move.line', 'workorder_id', 'Moves to Track',
        help="Inventory moves for which you must scan a lot number at this work order")
    finished_lot_id = fields.Many2one(
        'stock.production.lot', string='Lot/Serial Number', compute='_compute_finished_lot_id',
        inverse='_set_finished_lot_id', domain="[('product_id', '=', product_id), ('company_id', '=', company_id)]",
        check_company=True)
    time_ids = fields.One2many(
        'mrp.workcenter.productivity', 'workorder_id', copy=False)
    is_user_working = fields.Boolean(
        'Is the Current User Working', compute='_compute_working_users',
        help="Technical field indicating whether the current user is working. ")
    working_user_ids = fields.One2many('res.users', string='Working user on this work order.', compute='_compute_working_users')
    last_working_user_id = fields.One2many('res.users', string='Last user that worked on this work order.', compute='_compute_working_users')

    next_work_order_id = fields.Many2one('mrp.workorder', "Next Work Order", check_company=True)
    scrap_ids = fields.One2many('stock.scrap', 'workorder_id')
    scrap_count = fields.Integer(compute='_compute_scrap_move_count', string='Scrap Move')
    production_date = fields.Datetime('Production Date', related='production_id.date_planned_start', store=True, readonly=False)
    json_popover = fields.Char('Popover Data JSON', compute='_compute_json_popover')
    show_json_popover = fields.Boolean('Show Popover?', compute='_compute_json_popover')
    consumption = fields.Selection([
        ('strict', 'Strict'),
        ('warning', 'Warning'),
        ('flexible', 'Flexible')],
        required=True,
    )
+
    @api.depends('production_state', 'date_planned_start', 'date_planned_finished')
    def _compute_json_popover(self):
        """Build the JSON payload of the planning-warning popover.

        Collects informational/warning/danger messages about scheduling
        conflicts (waiting on / scheduled before the previous workorder,
        overdue, overlapping another workorder on the same workcenter) and
        stores them as JSON for the web client widget.
        """
        # one aggregated query: for each workorder, the latest planned dates
        # of the workorder(s) pointing to it via next_work_order_id
        previous_wo_data = self.env['mrp.workorder'].read_group(
            [('next_work_order_id', 'in', self.ids)],
            ['ids:array_agg(id)', 'date_planned_start:max', 'date_planned_finished:max'],
            ['next_work_order_id'])
        previous_wo_dict = dict([(x['next_work_order_id'][0], {
            'id': x['ids'][0],
            'date_planned_start': x['date_planned_start'],
            'date_planned_finished': x['date_planned_finished']})
            for x in previous_wo_data])
        if self.ids:
            conflicted_dict = self._get_conflicted_workorder_ids()
        for wo in self:
            infos = []
            # unplanned or not-yet-saved workorders get no popover
            if not wo.date_planned_start or not wo.date_planned_finished or not wo.ids:
                wo.show_json_popover = False
                wo.json_popover = False
                continue
            if wo.state in ['pending', 'ready']:
                previous_wo = previous_wo_dict.get(wo.id)
                prev_start = previous_wo and previous_wo['date_planned_start'] or False
                prev_finished = previous_wo and previous_wo['date_planned_finished'] or False
                if wo.state == 'pending' and prev_start and not (prev_start > wo.date_planned_start):
                    infos.append({
                        'color': 'text-primary',
                        'msg': _("Waiting the previous work order, planned from %(start)s to %(end)s",
                            start=format_datetime(self.env, prev_start, dt_format=False),
                            end=format_datetime(self.env, prev_finished, dt_format=False))
                    })
                if wo.date_planned_finished < fields.Datetime.now():
                    infos.append({
                        'color': 'text-warning',
                        'msg': _("The work order should have already been processed.")
                    })
                if prev_start and prev_start > wo.date_planned_start:
                    infos.append({
                        'color': 'text-danger',
                        'msg': _("Scheduled before the previous work order, planned from %(start)s to %(end)s",
                            start=format_datetime(self.env, prev_start, dt_format=False),
                            end=format_datetime(self.env, prev_finished, dt_format=False))
                    })
                if conflicted_dict.get(wo.id):
                    infos.append({
                        'color': 'text-danger',
                        'msg': _("Planned at the same time as other workorder(s) at %s", wo.workcenter_id.display_name)
                    })
            # the most severe (last appended) message drives the icon color
            color_icon = infos and infos[-1]['color'] or False
            wo.show_json_popover = bool(color_icon)
            wo.json_popover = json.dumps({
                'infos': infos,
                'color': color_icon,
                'icon': 'fa-exclamation-triangle' if color_icon in ['text-warning', 'text-danger'] else 'fa-info-circle',
                'replan': color_icon not in [False, 'text-primary']
            })
+
+ @api.depends('production_id.lot_producing_id')
+ def _compute_finished_lot_id(self):
+ for workorder in self:
+ workorder.finished_lot_id = workorder.production_id.lot_producing_id
+
+ def _set_finished_lot_id(self):
+ for workorder in self:
+ workorder.production_id.lot_producing_id = workorder.finished_lot_id
+
+ @api.depends('production_id.qty_producing')
+ def _compute_qty_producing(self):
+ for workorder in self:
+ workorder.qty_producing = workorder.production_id.qty_producing
+
    def _set_qty_producing(self):
        """Propagate the produced quantity edited on the workorder to its MO.

        Only writes when the value is non-zero and actually differs, then
        triggers the MO's own inverse so dependent moves are updated.
        """
        for workorder in self:
            if workorder.qty_producing != 0 and workorder.production_id.qty_producing != workorder.qty_producing:
                workorder.production_id.qty_producing = workorder.qty_producing
                workorder.production_id._set_qty_producing()
+
    # Both `date_planned_start` and `date_planned_finished` are related fields on
    # `leave_id`. Let's say we slide a workorder on a gantt view: a single call to
    # write is made with both fields changed. As the ORM doesn't batch the write on
    # related fields and instead makes multiple calls, the constraint check_dates()
    # is raised. That's why these compute and set methods are needed: they ensure
    # both dates are updated at the same time.
    @api.depends('leave_id')
    def _compute_dates_planned(self):
        for workorder in self:
            workorder.date_planned_start = workorder.leave_id.date_from
            workorder.date_planned_finished = workorder.leave_id.date_to
+
    def _set_dates_planned(self):
        """Write both planned dates on the calendar leave in a single call.

        NOTE(review): takes the dates from the first record and applies them
        to all leaves in the recordset — assumes the batch shares the same
        dates (the gantt drag writes one record at a time).
        """
        date_from = self[0].date_planned_start
        date_to = self[0].date_planned_finished
        self.mapped('leave_id').sudo().write({
            'date_from': date_from,
            'date_to': date_to,
        })
+
    def name_get(self):
        """Display name: prefix with the workorder's 1-based position in its MO
        when the MO has several workorders, e.g. "2 - MO/001 - Product - Op".

        Uses ``wo._origin.id`` so the index is also found for onchange/new
        records that wrap an existing database row.
        """
        res = []
        for wo in self:
            if len(wo.production_id.workorder_ids) == 1:
                res.append((wo.id, "%s - %s - %s" % (wo.production_id.name, wo.product_id.name, wo.name)))
            else:
                res.append((wo.id, "%s - %s - %s - %s" % (wo.production_id.workorder_ids.ids.index(wo._origin.id) + 1, wo.production_id.name, wo.product_id.name, wo.name)))
        return res
+
    def unlink(self):
        """Delete workorders, detaching moves and planning slots first."""
        # Removes references to workorder to avoid Validation Error
        (self.mapped('move_raw_ids') | self.mapped('move_finished_ids')).write({'workorder_id': False})
        self.mapped('leave_id').unlink()
        mo_dirty = self.production_id.filtered(lambda mo: mo.state in ("confirmed", "progress", "to_close"))
        res = super().unlink()
        # We need to go through `_action_confirm` for all workorders of the current productions to
        # make sure the links between them are correct (`next_work_order_id` could be obsolete now).
        mo_dirty.workorder_ids._action_confirm()
        return res
+
    @api.depends('production_id.product_qty', 'qty_produced', 'production_id.product_uom_id')
    def _compute_is_produced(self):
        """A workorder is 'produced' once qty_produced reaches the MO quantity
        (compared with the MO UoM rounding)."""
        self.is_produced = False
        for order in self.filtered(lambda p: p.production_id and p.production_id.product_uom_id):
            rounding = order.production_id.product_uom_id.rounding
            order.is_produced = float_compare(order.qty_produced, order.production_id.product_qty, precision_rounding=rounding) >= 0
+
    @api.depends('time_ids.duration', 'qty_produced')
    def _compute_duration(self):
        """Aggregate the time logs into total, per-unit and deviation figures.

        - duration: sum of all time-log durations (minutes)
        - duration_unit: minutes per produced unit (qty floored at 1)
        - duration_percent: deviation of real vs expected duration, in %
        """
        for order in self:
            order.duration = sum(order.time_ids.mapped('duration'))
            order.duration_unit = round(order.duration / max(order.qty_produced, 1), 2)  # rounding 2 because it is a time
            if order.duration_expected:
                order.duration_percent = 100 * (order.duration_expected - order.duration) / order.duration_expected
            else:
                order.duration_percent = 0
+
    def _set_duration(self):
        """Inverse of ``duration``: adjust the time logs to match the new total.

        If the user increases the duration, a single new productivity line is
        created ending now. If the duration is decreased, the oldest time logs
        are removed (or the oldest partially-affected one is shortened by
        moving its start date forward) until the excess is consumed.
        """

        def _float_duration_to_second(duration):
            # duration is a float number of minutes; split into whole minutes
            # plus fractional-minute seconds
            minutes = duration // 1
            seconds = (duration % 1) * 60
            return minutes * 60 + seconds

        for order in self:
            old_order_duation = sum(order.time_ids.mapped('duration'))
            new_order_duration = order.duration
            if new_order_duration == old_order_duation:
                continue

            delta_duration = new_order_duration - old_order_duation

            if delta_duration > 0:
                # add one time log covering exactly the added duration
                date_start = datetime.now() - timedelta(seconds=_float_duration_to_second(delta_duration))
                self.env['mrp.workcenter.productivity'].create(
                    order._prepare_timeline_vals(delta_duration, date_start, datetime.now())
                )
            else:
                # remove/shrink the oldest logs until the surplus is gone
                duration_to_remove = abs(delta_duration)
                timelines = order.time_ids.sorted(lambda t: t.date_start)
                timelines_to_unlink = self.env['mrp.workcenter.productivity']
                for timeline in timelines:
                    if duration_to_remove <= 0.0:
                        break
                    if timeline.duration <= duration_to_remove:
                        duration_to_remove -= timeline.duration
                        timelines_to_unlink |= timeline
                    else:
                        # shorten this log by moving its start date forward
                        new_time_line_duration = timeline.duration - duration_to_remove
                        timeline.date_start = timeline.date_end - timedelta(seconds=_float_duration_to_second(new_time_line_duration))
                        break
                timelines_to_unlink.unlink()
+
+ @api.depends('duration', 'duration_expected', 'state')
+ def _compute_progress(self):
+ for order in self:
+ if order.state == 'done':
+ order.progress = 100
+ elif order.duration_expected:
+ order.progress = order.duration * 100 / order.duration_expected
+ else:
+ order.progress = 0
+
+ def _compute_working_users(self):
+ """ Checks whether the current user is working, all the users currently working and the last user that worked. """
+ for order in self:
+ order.working_user_ids = [(4, order.id) for order in order.time_ids.filtered(lambda time: not time.date_end).sorted('date_start').mapped('user_id')]
+ if order.working_user_ids:
+ order.last_working_user_id = order.working_user_ids[-1]
+ elif order.time_ids:
+ order.last_working_user_id = order.time_ids.filtered('date_end').sorted('date_end')[-1].user_id if order.time_ids.filtered('date_end') else order.time_ids[-1].user_id
+ else:
+ order.last_working_user_id = False
+ if order.time_ids.filtered(lambda x: (x.user_id.id == self.env.user.id) and (not x.date_end) and (x.loss_type in ('productive', 'performance'))):
+ order.is_user_working = True
+ else:
+ order.is_user_working = False
+
+ def _compute_scrap_move_count(self):
+ data = self.env['stock.scrap'].read_group([('workorder_id', 'in', self.ids)], ['workorder_id'], ['workorder_id'])
+ count_data = dict((item['workorder_id'][0], item['workorder_id_count']) for item in data)
+ for workorder in self:
+ workorder.scrap_count = count_data.get(workorder.id, 0)
+
    @api.onchange('date_planned_finished')
    def _onchange_date_planned_finished(self):
        """Recompute the expected duration from the planned start/stop window,
        counting only the workcenter calendar's effective working time."""
        if self.date_planned_start and self.date_planned_finished:
            interval = self.workcenter_id.resource_calendar_id.get_work_duration_data(
                self.date_planned_start, self.date_planned_finished,
                domain=[('time_type', 'in', ['leave', 'other'])]
            )
            # get_work_duration_data returns hours; duration_expected is in minutes.
            self.duration_expected = interval['hours'] * 60
+
+ @api.onchange('operation_id')
+ def _onchange_operation_id(self):
+ if self.operation_id:
+ self.name = self.operation_id.name
+ self.workcenter_id = self.operation_id.workcenter_id.id
+
    @api.onchange('date_planned_start', 'duration_expected')
    def _onchange_date_planned_start(self):
        """Re-plan the finish date as start date plus expected duration,
        following the workcenter calendar (leaves are skipped)."""
        if self.date_planned_start and self.duration_expected:
            # duration_expected is in minutes; plan_hours() expects hours.
            self.date_planned_finished = self.workcenter_id.resource_calendar_id.plan_hours(
                self.duration_expected / 60.0, self.date_planned_start,
                compute_leaves=True, domain=[('time_type', 'in', ['leave', 'other'])]
            )
+
    @api.onchange('operation_id', 'workcenter_id', 'qty_production')
    def _onchange_expected_duration(self):
        """Refresh the expected duration whenever one of its inputs changes."""
        self.duration_expected = self._get_duration_expected()
+
    def write(self, values):
        """Guard workorder updates.

        - Forbid moving a workorder to another manufacturing order.
        - Forbid changing the workcenter of a started/finished workorder and
          keep the capacity leave in sync with the new workcenter otherwise.
        - Validate the planned date window and propagate it to the MO when the
          first (resp. last) workorder's start (resp. finish) date changes.
        """
        if 'production_id' in values:
            raise UserError(_('You cannot link this work order to another manufacturing order.'))
        if 'workcenter_id' in values:
            for workorder in self:
                if workorder.workcenter_id.id != values['workcenter_id']:
                    if workorder.state in ('progress', 'done', 'cancel'):
                        raise UserError(_('You cannot change the workcenter of a work order that is in progress or done.'))
                    # Move the capacity reservation to the new workcenter's resource.
                    workorder.leave_id.resource_id = self.env['mrp.workcenter'].browse(values['workcenter_id']).resource_id
        if 'date_planned_start' in values or 'date_planned_finished' in values:
            for workorder in self:
                start_date = fields.Datetime.to_datetime(values.get('date_planned_start')) or workorder.date_planned_start
                end_date = fields.Datetime.to_datetime(values.get('date_planned_finished')) or workorder.date_planned_finished
                if start_date and end_date and start_date > end_date:
                    raise UserError(_('The planned end date of the work order cannot be prior to the planned start date, please correct this to save the work order.'))
                # Update MO dates if the start date of the first WO or the
                # finished date of the last WO is updated.
                if workorder == workorder.production_id.workorder_ids[0] and 'date_planned_start' in values:
                    if values['date_planned_start']:
                        workorder.production_id.with_context(force_date=True).write({
                            'date_planned_start': fields.Datetime.to_datetime(values['date_planned_start'])
                        })
                if workorder == workorder.production_id.workorder_ids[-1] and 'date_planned_finished' in values:
                    if values['date_planned_finished']:
                        workorder.production_id.with_context(force_date=True).write({
                            'date_planned_finished': fields.Datetime.to_datetime(values['date_planned_finished'])
                        })
        return super(MrpWorkorder, self).write(values)
+
    @api.model_create_multi
    def create(self, values):
        """Create workorders, auto-confirming the ones manually added to an
        already confirmed / in-progress production."""
        res = super().create(values)
        # Auto-confirm manually added workorders.
        # We need to go through `_action_confirm` for all workorders of the current productions to
        # make sure the links between them are correct.
        to_confirm = res.filtered(lambda wo: wo.production_id.state in ("confirmed", "progress", "to_close"))
        to_confirm = to_confirm.production_id.workorder_ids
        to_confirm._action_confirm()
        return res
+
+ def _action_confirm(self):
+ workorders_by_production = defaultdict(lambda: self.env['mrp.workorder'])
+ for workorder in self:
+ workorders_by_production[workorder.production_id] |= workorder
+
+ for production, workorders in workorders_by_production.items():
+ workorders_by_bom = defaultdict(lambda: self.env['mrp.workorder'])
+ bom = self.env['mrp.bom']
+ moves = production.move_raw_ids | production.move_finished_ids
+
+ for workorder in self:
+ if workorder.operation_id.bom_id:
+ bom = workorder.operation_id.bom_id
+ if not bom:
+ bom = workorder.production_id.bom_id
+ previous_workorder = workorders_by_bom[bom][-1:]
+ previous_workorder.next_work_order_id = workorder.id
+ workorders_by_bom[bom] |= workorder
+
+ moves.filtered(lambda m: m.operation_id == workorder.operation_id).write({
+ 'workorder_id': workorder.id
+ })
+
+ exploded_boms, dummy = production.bom_id.explode(production.product_id, 1, picking_type=production.bom_id.picking_type_id)
+ exploded_boms = {b[0]: b[1] for b in exploded_boms}
+ for move in moves:
+ if move.workorder_id:
+ continue
+ bom = move.bom_line_id.bom_id
+ while bom and bom not in workorders_by_bom:
+ bom_data = exploded_boms.get(bom, {})
+ bom = bom_data.get('parent_line') and bom_data['parent_line'].bom_id or False
+ if bom in workorders_by_bom:
+ move.write({
+ 'workorder_id': workorders_by_bom[bom][-1:].id
+ })
+ else:
+ move.write({
+ 'workorder_id': workorders_by_bom[production.bom_id][-1:].id
+ })
+
+ for workorders in workorders_by_bom.values():
+ if not workorders:
+ continue
+ if workorders[0].state == 'pending':
+ workorders[0].state = 'ready'
+ for workorder in workorders:
+ workorder._start_nextworkorder()
+
+ def _get_byproduct_move_to_update(self):
+ return self.production_id.move_finished_ids.filtered(lambda x: (x.product_id.id != self.production_id.product_id.id) and (x.state not in ('done', 'cancel')))
+
+ def _start_nextworkorder(self):
+ if self.state == 'done' and self.next_work_order_id.state == 'pending':
+ self.next_work_order_id.state = 'ready'
+
    @api.model
    def gantt_unavailability(self, start_date, end_date, scale, group_bys=None, rows=None):
        """Get unavailabilities data to display in the Gantt view.

        Walks the (possibly nested) Gantt ``rows`` grouped by workcenter,
        fetches each workcenter's closed intervals between ``start_date`` and
        ``end_date`` and attaches them in place to the matching rows.
        """
        workcenter_ids = set()

        def traverse_inplace(func, row, **kargs):
            # Depth-first traversal of the row tree; ``func`` may return extra
            # kwargs that are propagated to the children rows.
            res = func(row, **kargs)
            if res:
                kargs.update(res)
            for row in row.get('rows'):
                traverse_inplace(func, row, **kargs)

        def search_workcenter_ids(row):
            # Collect every workcenter id appearing as a grouping key.
            if row.get('groupedBy') and row.get('groupedBy')[0] == 'workcenter_id' and row.get('resId'):
                workcenter_ids.add(row.get('resId'))

        for row in rows:
            traverse_inplace(search_workcenter_ids, row)
        start_datetime = fields.Datetime.to_datetime(start_date)
        end_datetime = fields.Datetime.to_datetime(end_date)
        workcenters = self.env['mrp.workcenter'].browse(workcenter_ids)
        unavailability_mapping = workcenters._get_unavailability_intervals(start_datetime, end_datetime)

        # Only notable intervals (longer than one Gantt cell) are sent to the
        # front-end (avoid sending useless information)
        cell_dt = (scale in ['day', 'week'] and timedelta(hours=1)) or (scale == 'month' and timedelta(days=1)) or timedelta(days=28)

        def add_unavailability(row, workcenter_id=None):
            # Rows inherit the workcenter of their closest grouped ancestor.
            if row.get('groupedBy') and row.get('groupedBy')[0] == 'workcenter_id' and row.get('resId'):
                workcenter_id = row.get('resId')
            if workcenter_id:
                notable_intervals = filter(lambda interval: interval[1] - interval[0] >= cell_dt, unavailability_mapping[workcenter_id])
                row['unavailabilities'] = [{'start': interval[0], 'stop': interval[1]} for interval in notable_intervals]
                return {'workcenter_id': workcenter_id}

        for row in rows:
            traverse_inplace(add_unavailability, row)
        return rows
+
    def button_start(self):
        """Start working on the workorder: set the quantity being produced,
        open a productivity time line for the current user, move the workorder
        (and its MO) to ``progress`` and make sure a workcenter capacity leave
        covers the planned period."""
        self.ensure_one()
        # As button_start is automatically called in the new view
        if self.state in ('done', 'cancel'):
            return True

        # Serial-tracked products are produced one unit at a time.
        if self.product_tracking == 'serial':
            self.qty_producing = 1.0
        else:
            self.qty_producing = self.qty_remaining

        self.env['mrp.workcenter.productivity'].create(
            self._prepare_timeline_vals(self.duration, datetime.now())
        )
        if self.production_id.state != 'progress':
            self.production_id.write({
                'date_start': datetime.now(),
            })
        if self.state == 'progress':
            return True
        start_date = datetime.now()
        vals = {
            'state': 'progress',
            'date_start': start_date,
        }
        if not self.leave_id:
            # No reservation yet: block the workcenter for the expected duration.
            leave = self.env['resource.calendar.leaves'].create({
                'name': self.display_name,
                'calendar_id': self.workcenter_id.resource_calendar_id.id,
                'date_from': start_date,
                'date_to': start_date + relativedelta(minutes=self.duration_expected),
                'resource_id': self.workcenter_id.resource_id.id,
                'time_type': 'other'
            })
            vals['leave_id'] = leave.id
            return self.write(vals)
        else:
            # Stretch the planned window so it contains the actual start date.
            if self.date_planned_start > start_date:
                vals['date_planned_start'] = start_date
            if self.date_planned_finished and self.date_planned_finished < start_date:
                vals['date_planned_finished'] = start_date
            return self.write(vals)
+
    def button_finish(self):
        """Finish the workorders: close every open time line, freeze the
        produced quantity, align the planned dates with reality and make the
        next chained workorder ready."""
        end_date = datetime.now()
        for workorder in self:
            if workorder.state in ('done', 'cancel'):
                continue
            workorder.end_all()
            vals = {
                'qty_produced': workorder.qty_produced or workorder.qty_producing or workorder.qty_production,
                'state': 'done',
                'date_finished': end_date,
                'date_planned_finished': end_date
            }
            # A workorder finished without ever being started still gets dates.
            if not workorder.date_start:
                vals['date_start'] = end_date
            if not workorder.date_planned_start or end_date < workorder.date_planned_start:
                vals['date_planned_start'] = end_date
            workorder.write(vals)

            workorder._start_nextworkorder()
        return True
+
    def end_previous(self, doall=False):
        """Close the open productivity time lines on these workorders.

        @param: doall: This will close all open time lines on the open work orders when doall = True, otherwise
        only the one of the current user
        """
        # TDE CLEANME
        timeline_obj = self.env['mrp.workcenter.productivity']
        domain = [('workorder_id', 'in', self.ids), ('date_end', '=', False)]
        if not doall:
            domain.append(('user_id', '=', self.env.user.id))
        not_productive_timelines = timeline_obj.browse()
        for timeline in timeline_obj.search(domain, limit=None if doall else 1):
            wo = timeline.workorder_id
            if wo.duration_expected <= wo.duration:
                # Expected time already exceeded: the whole open line becomes
                # a performance loss.
                if timeline.loss_type == 'productive':
                    not_productive_timelines += timeline
                timeline.write({'date_end': fields.Datetime.now()})
            else:
                # Split the line at the instant the expected duration would be
                # reached: before stays productive, after is a performance loss.
                maxdate = fields.Datetime.from_string(timeline.date_start) + relativedelta(minutes=wo.duration_expected - wo.duration)
                enddate = datetime.now()
                if maxdate > enddate:
                    timeline.write({'date_end': enddate})
                else:
                    timeline.write({'date_end': maxdate})
                    not_productive_timelines += timeline.copy({'date_start': maxdate, 'date_end': enddate})
        if not_productive_timelines:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search([('loss_type', '=', 'performance')], limit=1)
            if not len(loss_id):
                raise UserError(_("You need to define at least one unactive productivity loss in the category 'Performance'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."))
            not_productive_timelines.write({'loss_id': loss_id.id})
        return True
+
    def end_all(self):
        """Close the open time lines of every user on these workorders."""
        return self.end_previous(doall=True)
+
    def button_pending(self):
        """Pause work: close the current user's open time line."""
        self.end_previous()
        return True
+
+ def button_unblock(self):
+ for order in self:
+ order.workcenter_id.unblock()
+ return True
+
    def action_cancel(self):
        """Cancel the workorders: free the workcenter reservation and clear
        the planned dates."""
        self.leave_id.unlink()
        return self.write({
            'state': 'cancel',
            'date_planned_start': False,
            'date_planned_finished': False,
        })
+
    def action_replan(self):
        """Replan a work order.

        It actually replans every "ready" or "pending"
        work orders of the linked manufacturing orders.
        """
        for production in self.production_id:
            production._plan_workorders(replan=True)
        return True
+
    def button_done(self):
        """Mark the workorders as done, closing all open time lines first."""
        if any(x.state in ('done', 'cancel') for x in self):
            # NOTE(review): the check is on work orders but the message talks
            # about a Manufacturing Order — possibly misleading wording.
            raise UserError(_('A Manufacturing Order is already done or cancelled.'))
        self.end_all()
        end_date = datetime.now()
        return self.write({
            'state': 'done',
            'date_finished': end_date,
            'date_planned_finished': end_date,
        })
+
    def button_scrap(self):
        """Open the scrap wizard pre-filled for this workorder, restricting
        the selectable products to the MO's open components and the finished
        products already produced."""
        self.ensure_one()
        return {
            'name': _('Scrap'),
            'view_mode': 'form',
            'res_model': 'stock.scrap',
            'view_id': self.env.ref('stock.stock_scrap_form_view2').id,
            'type': 'ir.actions.act_window',
            'context': {'default_company_id': self.production_id.company_id.id,
                        'default_workorder_id': self.id,
                        'default_production_id': self.production_id.id,
                        'product_ids': (self.production_id.move_raw_ids.filtered(lambda x: x.state not in ('done', 'cancel')) | self.production_id.move_finished_ids.filtered(lambda x: x.state == 'done')).mapped('product_id').ids},
            'target': 'new',
        }
+
+ def action_see_move_scrap(self):
+ self.ensure_one()
+ action = self.env["ir.actions.actions"]._for_xml_id("stock.action_stock_scrap")
+ action['domain'] = [('workorder_id', '=', self.id)]
+ return action
+
+ def action_open_wizard(self):
+ self.ensure_one()
+ action = self.env["ir.actions.actions"]._for_xml_id("mrp.mrp_workorder_mrp_production_form")
+ action['res_id'] = self.id
+ return action
+
    @api.depends('qty_production', 'qty_produced')
    def _compute_qty_remaining(self):
        """Quantity still to produce, rounded with the production UoM."""
        for wo in self:
            wo.qty_remaining = float_round(wo.qty_production - wo.qty_produced, precision_rounding=wo.production_id.product_uom_id.rounding)
+
    def _get_duration_expected(self, alternative_workcenter=False, ratio=1):
        """Return the expected duration (in minutes) of the workorder: fixed
        setup + cleanup time of the workcenter plus the cycle time, scaled by
        the workcenter's time efficiency.

        :param alternative_workcenter: evaluate the duration on this other
            workcenter instead of the current one
        :param ratio: scaling factor applied when no routing operation is set
        """
        self.ensure_one()
        if not self.workcenter_id:
            return self.duration_expected
        if not self.operation_id:
            # No routing operation: rescale the current expected duration by
            # the ratio, keeping the fixed start/stop times untouched.
            duration_expected_working = (self.duration_expected - self.workcenter_id.time_start - self.workcenter_id.time_stop) * self.workcenter_id.time_efficiency / 100.0
            if duration_expected_working < 0:
                duration_expected_working = 0
            return self.workcenter_id.time_start + self.workcenter_id.time_stop + duration_expected_working * ratio * 100.0 / self.workcenter_id.time_efficiency
        # Number of workcenter cycles needed, rounded up to whole cycles.
        qty_production = self.production_id.product_uom_id._compute_quantity(self.qty_production, self.production_id.product_id.uom_id)
        cycle_number = float_round(qty_production / self.workcenter_id.capacity, precision_digits=0, rounding_method='UP')
        if alternative_workcenter:
            # TODO : find a better alternative : the settings of workcenter can change
            duration_expected_working = (self.duration_expected - self.workcenter_id.time_start - self.workcenter_id.time_stop) * self.workcenter_id.time_efficiency / (100.0 * cycle_number)
            if duration_expected_working < 0:
                duration_expected_working = 0
            return alternative_workcenter.time_start + alternative_workcenter.time_stop + cycle_number * duration_expected_working * 100.0 / alternative_workcenter.time_efficiency
        time_cycle = self.operation_id.time_cycle
        return self.workcenter_id.time_start + self.workcenter_id.time_stop + cycle_number * time_cycle * 100.0 / self.workcenter_id.time_efficiency
+
    def _get_conflicted_workorder_ids(self):
        """Get conflicted workorder(s) with self.

        Conflict means having two workorders in the same time in the same workcenter.

        :return: defaultdict with key as workorder id of self and value as related conflicted workorder
        """
        # Flush pending ORM writes so the raw SQL below sees current values.
        self.flush(['state', 'date_planned_start', 'date_planned_finished', 'workcenter_id'])
        sql = """
            SELECT wo1.id, wo2.id
            FROM mrp_workorder wo1, mrp_workorder wo2
            WHERE
                wo1.id IN %s
                AND wo1.state IN ('pending','ready')
                AND wo2.state IN ('pending','ready')
                AND wo1.id != wo2.id
                AND wo1.workcenter_id = wo2.workcenter_id
                AND (DATE_TRUNC('second', wo2.date_planned_start), DATE_TRUNC('second', wo2.date_planned_finished))
                    OVERLAPS (DATE_TRUNC('second', wo1.date_planned_start), DATE_TRUNC('second', wo1.date_planned_finished))
        """
        self.env.cr.execute(sql, [tuple(self.ids)])
        res = defaultdict(list)
        for wo1, wo2 in self.env.cr.fetchall():
            res[wo1].append(wo2)
        return res
+
+ @api.model
+ def _prepare_component_quantity(self, move, qty_producing):
+ """ helper that computes quantity to consume (or to create in case of byproduct)
+ depending on the quantity producing and the move's unit factor"""
+ if move.product_id.tracking == 'serial':
+ uom = move.product_id.uom_id
+ else:
+ uom = move.product_uom
+ return move.product_uom._compute_quantity(
+ qty_producing * move.unit_factor,
+ uom,
+ round=False
+ )
+
    def _prepare_timeline_vals(self, duration, date_start, date_end=False):
        """Values for a mrp.workcenter.productivity time line of the current
        user on this workorder, classified as productive or performance loss
        depending on whether the expected duration is already exceeded."""
        # Need a loss in case of the real time exceeding the expected
        if not self.duration_expected or duration < self.duration_expected:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search([('loss_type', '=', 'productive')], limit=1)
            if not len(loss_id):
                raise UserError(_("You need to define at least one productivity loss in the category 'Productivity'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."))
        else:
            loss_id = self.env['mrp.workcenter.productivity.loss'].search([('loss_type', '=', 'performance')], limit=1)
            if not len(loss_id):
                raise UserError(_("You need to define at least one productivity loss in the category 'Performance'. Create one from the Manufacturing app, menu: Configuration / Productivity Losses."))
        return {
            'workorder_id': self.id,
            'workcenter_id': self.workcenter_id.id,
            'description': _('Time Tracking: %(user)s', user=self.env.user.name),
            'loss_id': loss_id[0].id,
            'date_start': date_start,
            'date_end': date_end,
            'user_id': self.env.user.id,  # FIXME sle: can be inconsistent with company_id
            'company_id': self.company_id.id,
        }
+
    def _update_finished_move(self):
        """ Update the finished move & move lines in order to set the finished
        product lot on it as well as the produced quantity. This method get the
        information either from the last workorder or from the Produce wizard."""
        production_move = self.production_id.move_finished_ids.filtered(
            lambda move: move.product_id == self.product_id and
            move.state not in ('done', 'cancel')
        )
        if not production_move:
            return
        if production_move.product_id.tracking != 'none':
            if not self.finished_lot_id:
                raise UserError(_('You need to provide a lot for the finished product.'))
            move_line = production_move.move_line_ids.filtered(
                lambda line: line.lot_id.id == self.finished_lot_id.id
            )
            if move_line:
                # A line for this lot already exists: increment it (but a
                # serial number may only ever be produced once).
                if self.product_id.tracking == 'serial':
                    raise UserError(_('You cannot produce the same serial number twice.'))
                move_line.product_uom_qty += self.qty_producing
                move_line.qty_done += self.qty_producing
            else:
                # First quantity for this lot: create the move line, honoring
                # the destination's putaway strategy if any.
                location_dest_id = production_move.location_dest_id._get_putaway_strategy(self.product_id).id or production_move.location_dest_id.id
                move_line.create({
                    'move_id': production_move.id,
                    'product_id': production_move.product_id.id,
                    'lot_id': self.finished_lot_id.id,
                    'product_uom_qty': self.qty_producing,
                    'product_uom_id': self.product_uom_id.id,
                    'qty_done': self.qty_producing,
                    'location_id': production_move.location_id.id,
                    'location_dest_id': location_dest_id,
                })
        else:
            # Untracked product: simply set the done quantity on the move.
            rounding = production_move.product_uom.rounding
            production_move._set_quantity_done(
                float_round(self.qty_producing, precision_rounding=rounding)
            )
+
    def _check_sn_uniqueness(self):
        """ Alert the user if the serial number has already been produced """
        if self.product_tracking == 'serial' and self.finished_lot_id:
            # A done move line consuming from a production location with this
            # lot means the serial was already produced once.
            sml = self.env['stock.move.line'].search_count([
                ('lot_id', '=', self.finished_lot_id.id),
                ('location_id.usage', '=', 'production'),
                ('qty_done', '=', 1),
                ('state', '=', 'done')
            ])
            if sml:
                raise UserError(_('This serial number for product %s has already been produced', self.product_id.name))
+
    def _update_qty_producing(self, quantity):
        """Overwrite the quantity being produced, but only when a quantity is
        already set on the workorder."""
        self.ensure_one()
        if self.qty_producing:
            self.qty_producing = quantity
diff --git a/addons/mrp/models/product.py b/addons/mrp/models/product.py
new file mode 100644
index 00000000..ade9617d
--- /dev/null
+++ b/addons/mrp/models/product.py
@@ -0,0 +1,209 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from datetime import timedelta
+from odoo import api, fields, models
+from odoo.tools.float_utils import float_round, float_is_zero
+
+
class ProductTemplate(models.Model):
    """Manufacturing extensions on product templates: BoM links, BoM usage
    counters and the manufacturing lead time."""
    _inherit = "product.template"

    bom_line_ids = fields.One2many('mrp.bom.line', 'product_tmpl_id', 'BoM Components')
    bom_ids = fields.One2many('mrp.bom', 'product_tmpl_id', 'Bill of Materials')
    # Counters are computed without sudo so record rules still apply.
    bom_count = fields.Integer('# Bill of Material',
        compute='_compute_bom_count', compute_sudo=False)
    used_in_bom_count = fields.Integer('# of BoM Where is Used',
        compute='_compute_used_in_bom_count', compute_sudo=False)
    mrp_product_qty = fields.Float('Manufactured',
        compute='_compute_mrp_product_qty', compute_sudo=False)
    produce_delay = fields.Float(
        'Manufacturing Lead Time', default=0.0,
        help="Average lead time in days to manufacture this product. In the case of multi-level BOM, the manufacturing lead times of the components will be added.")

    def _compute_bom_count(self):
        # Number of BoMs producing this template.
        for product in self:
            product.bom_count = self.env['mrp.bom'].search_count([('product_tmpl_id', '=', product.id)])

    def _compute_used_in_bom_count(self):
        # Number of BoMs consuming this template as a component.
        for template in self:
            template.used_in_bom_count = self.env['mrp.bom'].search_count(
                [('bom_line_ids.product_tmpl_id', '=', template.id)])

    def write(self, values):
        # Archive/unarchive the related BoMs together with the template.
        if 'active' in values:
            self.filtered(lambda p: p.active != values['active']).with_context(active_test=False).bom_ids.write({
                'active': values['active']
            })
        return super().write(values)

    def action_used_in_bom(self):
        """Open the list of BoMs using this template as a component."""
        self.ensure_one()
        action = self.env["ir.actions.actions"]._for_xml_id("mrp.mrp_bom_form_action")
        action['domain'] = [('bom_line_ids.product_tmpl_id', '=', self.id)]
        return action

    def _compute_mrp_product_qty(self):
        # Sum of the manufactured quantities of all variants.
        for template in self:
            template.mrp_product_qty = float_round(sum(template.mapped('product_variant_ids').mapped('mrp_product_qty')), precision_rounding=template.uom_id.rounding)

    def action_view_mos(self):
        """Open the MO analysis report filtered on this template's done MOs."""
        action = self.env["ir.actions.actions"]._for_xml_id("mrp.mrp_production_report")
        action['domain'] = [('state', '=', 'done'), ('product_tmpl_id', 'in', self.ids)]
        action['context'] = {
            'graph_measure': 'product_uom_qty',
            'time_ranges': {'field': 'date_planned_start', 'range': 'last_365_days'}
        }
        return action
+
+
class ProductProduct(models.Model):
    """Manufacturing extensions on product variants: variant BoMs, BoM usage
    counters and kit-aware quantity computations."""
    _inherit = "product.product"

    variant_bom_ids = fields.One2many('mrp.bom', 'product_id', 'BOM Product Variants')
    bom_line_ids = fields.One2many('mrp.bom.line', 'product_id', 'BoM Components')
    # Counters are computed without sudo so record rules still apply.
    bom_count = fields.Integer('# Bill of Material',
        compute='_compute_bom_count', compute_sudo=False)
    used_in_bom_count = fields.Integer('# BoM Where Used',
        compute='_compute_used_in_bom_count', compute_sudo=False)
    mrp_product_qty = fields.Float('Manufactured',
        compute='_compute_mrp_product_qty', compute_sudo=False)

    def _compute_bom_count(self):
        # BoMs specific to this variant, plus template-level BoMs (no variant).
        for product in self:
            product.bom_count = self.env['mrp.bom'].search_count(['|', ('product_id', '=', product.id), '&', ('product_id', '=', False), ('product_tmpl_id', '=', product.product_tmpl_id.id)])

    def _compute_used_in_bom_count(self):
        # Number of BoMs consuming this variant as a component.
        for product in self:
            product.used_in_bom_count = self.env['mrp.bom'].search_count([('bom_line_ids.product_id', '=', product.id)])

    def write(self, values):
        # Archive/unarchive the variant-specific BoMs with the variant.
        if 'active' in values:
            self.filtered(lambda p: p.active != values['active']).with_context(active_test=False).variant_bom_ids.write({
                'active': values['active']
            })
        return super().write(values)

    def get_components(self):
        """ Return the components list ids in case of kit product.
        Return the product itself otherwise"""
        self.ensure_one()
        bom_kit = self.env['mrp.bom']._bom_find(product=self, bom_type='phantom')
        if bom_kit:
            boms, bom_sub_lines = bom_kit.explode(self, 1)
            return [bom_line.product_id.id for bom_line, data in bom_sub_lines if bom_line.product_id.type == 'product']
        else:
            return super(ProductProduct, self).get_components()

    def action_used_in_bom(self):
        """Open the list of BoMs using this variant as a component."""
        self.ensure_one()
        action = self.env["ir.actions.actions"]._for_xml_id("mrp.mrp_bom_form_action")
        action['domain'] = [('bom_line_ids.product_id', '=', self.id)]
        return action

    def _compute_mrp_product_qty(self):
        # Quantity produced by done MOs planned in the last 365 days.
        date_from = fields.Datetime.to_string(fields.datetime.now() - timedelta(days=365))
        #TODO: state = done?
        domain = [('state', '=', 'done'), ('product_id', 'in', self.ids), ('date_planned_start', '>', date_from)]
        read_group_res = self.env['mrp.production'].read_group(domain, ['product_id', 'product_uom_qty'], ['product_id'])
        mapped_data = dict([(data['product_id'][0], data['product_uom_qty']) for data in read_group_res])
        for product in self:
            if not product.id:
                # New (unsaved) records cannot have produced anything yet.
                product.mrp_product_qty = 0.0
                continue
            product.mrp_product_qty = float_round(mapped_data.get(product.id, 0), precision_rounding=product.uom_id.rounding)

    def _compute_quantities_dict(self, lot_id, owner_id, package_id, from_date=False, to_date=False):
        """ When the product is a kit, this override computes the fields :
         - 'virtual_available'
         - 'qty_available'
         - 'incoming_qty'
         - 'outgoing_qty'
         - 'free_qty'

        This override is used to get the correct quantities of products
        with 'phantom' as BoM type.
        """
        bom_kits = self.env['mrp.bom']._get_product2bom(self, bom_type='phantom')
        kits = self.filtered(lambda p: bom_kits.get(p))
        res = super(ProductProduct, self - kits)._compute_quantities_dict(lot_id, owner_id, package_id, from_date=from_date, to_date=to_date)
        for product in bom_kits:
            boms, bom_sub_lines = bom_kits[product].explode(product, 1)
            # For each stock field, the buildable quantity of the kit is the
            # minimum over its components of (component qty / qty per kit).
            ratios_virtual_available = []
            ratios_qty_available = []
            ratios_incoming_qty = []
            ratios_outgoing_qty = []
            ratios_free_qty = []
            for bom_line, bom_line_data in bom_sub_lines:
                component = bom_line.product_id
                if component.type != 'product' or float_is_zero(bom_line_data['qty'], precision_rounding=bom_line.product_uom_id.rounding):
                    # As BoMs allow components with 0 qty, a.k.a. optional components, we simply skip those
                    # to avoid a division by zero. The same logic is applied to non-storable products as those
                    # products have 0 qty available.
                    continue
                uom_qty_per_kit = bom_line_data['qty'] / bom_line_data['original_qty']
                qty_per_kit = bom_line.product_uom_id._compute_quantity(uom_qty_per_kit, bom_line.product_id.uom_id, raise_if_failure=False)
                if not qty_per_kit:
                    continue
                # Components already handled by super() have their values in
                # res; others fall back to their stored field values.
                component_res = res.get(component.id, {
                    "virtual_available": component.virtual_available,
                    "qty_available": component.qty_available,
                    "incoming_qty": component.incoming_qty,
                    "outgoing_qty": component.outgoing_qty,
                    "free_qty": component.free_qty,
                })
                ratios_virtual_available.append(component_res["virtual_available"] / qty_per_kit)
                ratios_qty_available.append(component_res["qty_available"] / qty_per_kit)
                ratios_incoming_qty.append(component_res["incoming_qty"] / qty_per_kit)
                ratios_outgoing_qty.append(component_res["outgoing_qty"] / qty_per_kit)
                ratios_free_qty.append(component_res["free_qty"] / qty_per_kit)
            if bom_sub_lines and ratios_virtual_available:  # Guard against all consumable bom: at least one ratio should be present.
                # `// 1` floors the ratios: only whole kits count.
                res[product.id] = {
                    'virtual_available': min(ratios_virtual_available) // 1,
                    'qty_available': min(ratios_qty_available) // 1,
                    'incoming_qty': min(ratios_incoming_qty) // 1,
                    'outgoing_qty': min(ratios_outgoing_qty) // 1,
                    'free_qty': min(ratios_free_qty) // 1,
                }
            else:
                res[product.id] = {
                    'virtual_available': 0,
                    'qty_available': 0,
                    'incoming_qty': 0,
                    'outgoing_qty': 0,
                    'free_qty': 0,
                }

        return res

    def action_view_bom(self):
        """Open the BoMs producing these variants (or their templates)."""
        action = self.env["ir.actions.actions"]._for_xml_id("mrp.product_open_bom")
        template_ids = self.mapped('product_tmpl_id').ids
        # bom specific to this variant or global to template
        action['context'] = {
            'default_product_tmpl_id': template_ids[0],
            'default_product_id': self.ids[0],
        }
        action['domain'] = ['|', ('product_id', 'in', self.ids), '&', ('product_id', '=', False), ('product_tmpl_id', 'in', template_ids)]
        return action

    def action_view_mos(self):
        """Open the MO analysis report filtered on these variants' done MOs."""
        action = self.product_tmpl_id.action_view_mos()
        action['domain'] = [('state', '=', 'done'), ('product_id', 'in', self.ids)]
        return action

    def action_open_quants(self):
        """Open stock quants; kit products are replaced by their components
        (a kit has no stock of its own)."""
        bom_kits = {}
        for product in self:
            bom = self.env['mrp.bom']._bom_find(product=product, bom_type='phantom')
            if bom:
                bom_kits[product] = bom
        components = self - self.env['product.product'].concat(*list(bom_kits.keys()))
        for product in bom_kits:
            boms, bom_sub_lines = bom_kits[product].explode(product, 1)
            components |= self.env['product.product'].concat(*[l[0].product_id for l in bom_sub_lines])
        res = super(ProductProduct, components).action_open_quants()
        if bom_kits:
            res['context']['single_product'] = False
            res['context'].pop('default_product_tmpl_id', None)
        return res
diff --git a/addons/mrp/models/res_company.py b/addons/mrp/models/res_company.py
new file mode 100644
index 00000000..bbfdfd84
--- /dev/null
+++ b/addons/mrp/models/res_company.py
@@ -0,0 +1,38 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models
+
+
class Company(models.Model):
    """Company-level manufacturing settings and the 'mrp.unbuild' sequence."""
    _inherit = 'res.company'

    manufacturing_lead = fields.Float(
        'Manufacturing Lead Time', default=0.0, required=True,
        help="Security days for each manufacturing operation.")

    def _create_unbuild_sequence(self):
        """Create one 'mrp.unbuild' ir.sequence for every company in self."""
        sequence_vals = [{
            'name': 'Unbuild',
            'code': 'mrp.unbuild',
            'company_id': company.id,
            'prefix': 'UB/',
            'padding': 5,
            'number_next': 1,
            'number_increment': 1,
        } for company in self]
        if sequence_vals:
            self.env['ir.sequence'].create(sequence_vals)

    @api.model
    def create_missing_unbuild_sequences(self):
        """Backfill the unbuild sequence for companies that lack one."""
        all_companies = self.env['res.company'].search([])
        companies_with_sequence = self.env['ir.sequence'].search([('code', '=', 'mrp.unbuild')]).mapped('company_id')
        (all_companies - companies_with_sequence)._create_unbuild_sequence()

    def _create_per_company_sequences(self):
        """Also create the unbuild sequence when company sequences are set up."""
        super(Company, self)._create_per_company_sequences()
        self._create_unbuild_sequence()
diff --git a/addons/mrp/models/res_config_settings.py b/addons/mrp/models/res_config_settings.py
new file mode 100644
index 00000000..0ab3a426
--- /dev/null
+++ b/addons/mrp/models/res_config_settings.py
@@ -0,0 +1,39 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models
+
+
class ResConfigSettings(models.TransientModel):
    """Expose the MRP feature toggles (groups and optional modules) on the
    general settings screen."""
    _inherit = 'res.config.settings'

    manufacturing_lead = fields.Float(related='company_id.manufacturing_lead', string="Manufacturing Lead Time", readonly=False)
    use_manufacturing_lead = fields.Boolean(string="Default Manufacturing Lead Time", config_parameter='mrp.use_manufacturing_lead')
    group_mrp_byproducts = fields.Boolean("By-Products",
        implied_group='mrp.group_mrp_byproducts')
    module_mrp_mps = fields.Boolean("Master Production Schedule")
    module_mrp_plm = fields.Boolean("Product Lifecycle Management (PLM)")
    module_mrp_workorder = fields.Boolean("Work Orders")
    module_quality_control = fields.Boolean("Quality")
    module_mrp_subcontracting = fields.Boolean("Subcontracting")
    group_mrp_routings = fields.Boolean("MRP Work Orders",
        implied_group='mrp.group_mrp_routings')
    group_locked_by_default = fields.Boolean("Lock Quantities To Consume", implied_group='mrp.group_locked_by_default')

    @api.onchange('use_manufacturing_lead')
    def _onchange_use_manufacturing_lead(self):
        """Reset the company lead time as soon as the feature is switched off."""
        if not self.use_manufacturing_lead:
            self.manufacturing_lead = 0.0

    @api.onchange('group_mrp_routings')
    def _onchange_group_mrp_routings(self):
        """Keep the 'mrp_workorder' module flag in sync with the routing group.

        Enabling 'MRP Work Orders' requires installing 'mrp_workorder'. The
        opposite does not hold: other modules (e.g. 'quality_mrp_workorder')
        may depend on 'mrp_workorder', so when the group is disabled we only
        mark the module for uninstall if it is not already installed.
        """
        if self.group_mrp_routings:
            self.module_mrp_workorder = True
            return
        installed = self.env['ir.module.module'].search(
            [('name', '=', 'mrp_workorder'), ('state', '=', 'installed')])
        if not installed:
            self.module_mrp_workorder = False
diff --git a/addons/mrp/models/stock_move.py b/addons/mrp/models/stock_move.py
new file mode 100644
index 00000000..fb7b7ed4
--- /dev/null
+++ b/addons/mrp/models/stock_move.py
@@ -0,0 +1,433 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, exceptions, fields, models, _
+from odoo.exceptions import UserError
+from odoo.tools import float_compare, float_round, float_is_zero, OrderedSet
+
+
class StockMoveLine(models.Model):
    """MRP extensions of stock.move.line: link each line to the work order /
    production order it belongs to and maintain the produce/consume mapping
    used by the traceability report."""
    _inherit = 'stock.move.line'

    # Work order this line was processed on (if any).
    workorder_id = fields.Many2one('mrp.workorder', 'Work Order', check_company=True)
    # Manufacturing order the line belongs to (set on reservation, see
    # StockMove._action_assign below).
    production_id = fields.Many2one('mrp.production', 'Production Order', check_company=True)

    @api.model_create_multi
    def create(self, values):
        """Create the lines, then wire traceability for lines added after the fact.

        :param values: list of value dicts (model_create_multi)
        :return: the created stock.move.line recordset
        """
        res = super(StockMoveLine, self).create(values)
        for line in res:
            # If the line is added in a done production, we need to map it
            # manually to the produced move lines in order to see them in the
            # traceability report
            if line.move_id.raw_material_production_id and line.state == 'done':
                mo = line.move_id.raw_material_production_id
                # Finished lots = the MO's producing lot plus the lots of all
                # by-product finished moves (finished moves whose product
                # differs from the MO's product).
                finished_lots = mo.lot_producing_id
                finished_lots |= mo.move_finished_ids.filtered(lambda m: m.product_id != mo.product_id).move_line_ids.lot_id
                if finished_lots:
                    # Only link to the produced lines carrying one of those lots.
                    produced_move_lines = mo.move_finished_ids.move_line_ids.filtered(lambda sml: sml.lot_id in finished_lots)
                    line.produce_line_ids = [(6, 0, produced_move_lines.ids)]
                else:
                    # No lot tracking: link to every produced line of the MO.
                    produced_move_lines = mo.move_finished_ids.move_line_ids
                    line.produce_line_ids = [(6, 0, produced_move_lines.ids)]
        return res

    def _get_similar_move_lines(self):
        """Extend the set of 'similar' lines (same product, lot set) with the
        finished/raw lines of the linked production order, so lot uniqueness
        checks see the whole MO."""
        lines = super(StockMoveLine, self)._get_similar_move_lines()
        if self.move_id.production_id:
            finished_moves = self.move_id.production_id.move_finished_ids
            finished_move_lines = finished_moves.mapped('move_line_ids')
            lines |= finished_move_lines.filtered(lambda ml: ml.product_id == self.product_id and (ml.lot_id or ml.lot_name))
        if self.move_id.raw_material_production_id:
            raw_moves = self.move_id.raw_material_production_id.move_raw_ids
            raw_moves_lines = raw_moves.mapped('move_line_ids')
            lines |= raw_moves_lines.filtered(lambda ml: ml.product_id == self.product_id and (ml.lot_id or ml.lot_name))
        return lines

    def _reservation_is_updatable(self, quantity, reserved_quant):
        """Forbid growing a reservation on a line already linked (via lot) to
        produced lines, unless its remaining done quantity covers ``quantity``.

        :param quantity: quantity (product UoM) we would like to add
        :param reserved_quant: candidate stock.quant
        :return: bool, whether the reservation may be updated in place
        """
        self.ensure_one()
        if self.produce_line_ids.lot_id:
            # Remaining = done - reserved, expressed in the product's UoM.
            ml_remaining_qty = self.qty_done - self.product_uom_qty
            ml_remaining_qty = self.product_uom_id._compute_quantity(ml_remaining_qty, self.product_id.uom_id, rounding_method="HALF-UP")
            if float_compare(ml_remaining_qty, quantity, precision_rounding=self.product_id.uom_id.rounding) < 0:
                return False
        return super(StockMoveLine, self)._reservation_is_updatable(quantity, reserved_quant)

    def write(self, vals):
        """Log a tracking message on the MO chatter when a *done* line of a
        production has its lot, source location or done quantity modified."""
        for move_line in self:
            production = move_line.move_id.production_id or move_line.move_id.raw_material_production_id
            if production and move_line.state == 'done' and any(field in vals for field in ('lot_id', 'location_id', 'qty_done')):
                # _log_message presumably renders the qweb template into the
                # MO's chatter — helper defined elsewhere (stock), TODO confirm.
                move_line._log_message(production, move_line, 'mrp.track_production_move_template', vals)
        return super(StockMoveLine, self).write(vals)

    def _get_aggregated_product_quantities(self, **kwargs):
        """Returns dictionary of products and corresponding values of interest grouped by optional kit_name

        Removes descriptions where description == kit_name. kit_name is expected to be passed as a
        kwargs value because this is not directly stored in move_line_ids. Unfortunately because we
        are working with aggregated data, we have to loop through the aggregation to do this removal.

        arguments: kit_name (optional): string value of a kit name passed as a kwarg
        returns: dictionary {same_key_as_super: {same_values_as_super, ...}
        """
        aggregated_move_lines = super()._get_aggregated_product_quantities(**kwargs)
        kit_name = kwargs.get('kit_name')
        if kit_name:
            for aggregated_move_line in aggregated_move_lines:
                if aggregated_move_lines[aggregated_move_line]['description'] == kit_name:
                    aggregated_move_lines[aggregated_move_line]['description'] = ""
        return aggregated_move_lines
+
+
class StockMove(models.Model):
    """MRP extensions of stock.move: tie moves to manufacturing orders (as raw
    material or as finished product), to BoM lines / by-products and work
    orders, and explode kit (phantom) BoMs on confirmation."""
    _inherit = 'stock.move'

    # MO that this procurement created (used for picking grouping/merging).
    created_production_id = fields.Many2one('mrp.production', 'Created Production Order', check_company=True)
    # Set when the move produces the MO's finished product / by-products.
    production_id = fields.Many2one(
        'mrp.production', 'Production Order for finished products', check_company=True, index=True)
    # Set when the move consumes a component of the MO.
    raw_material_production_id = fields.Many2one(
        'mrp.production', 'Production Order for components', check_company=True, index=True)
    unbuild_id = fields.Many2one(
        'mrp.unbuild', 'Disassembly Order', check_company=True)
    consume_unbuild_id = fields.Many2one(
        'mrp.unbuild', 'Consumed Disassembly Order', check_company=True)
    # Operations of the MO's BoM that this component may be consumed at.
    allowed_operation_ids = fields.Many2many('mrp.routing.workcenter', compute='_compute_allowed_operation_ids')
    operation_id = fields.Many2one(
        'mrp.routing.workcenter', 'Operation To Consume', check_company=True,
        domain="[('id', 'in', allowed_operation_ids)]")
    workorder_id = fields.Many2one(
        'mrp.workorder', 'Work Order To Consume', copy=False, check_company=True)
    # Quantities to process, in normalized UoMs
    bom_line_id = fields.Many2one('mrp.bom.line', 'BoM Line', check_company=True)
    byproduct_id = fields.Many2one(
        'mrp.bom.byproduct', 'By-products', check_company=True,
        help="By-product line that generated the move in a manufacturing order")
    # Demand per remaining unit to produce; see _compute_unit_factor.
    unit_factor = fields.Float('Unit Factor', compute='_compute_unit_factor', store=True)
    is_done = fields.Boolean(
        'Done', compute='_compute_is_done',
        store=True,
        help='Technical Field to order moves')
    order_finished_lot_ids = fields.Many2many('stock.production.lot', string="Finished Lot/Serial Number", compute='_compute_order_finished_lot_ids')
    should_consume_qty = fields.Float('Quantity To Consume', compute='_compute_should_consume_qty', digits='Product Unit of Measure')

    @api.depends('raw_material_production_id.priority')
    def _compute_priority(self):
        """Component moves inherit the priority of their MO (fall back on the
        move's own priority, then '0')."""
        super()._compute_priority()
        for move in self:
            move.priority = move.raw_material_production_id.priority or move.priority or '0'

    @api.depends('raw_material_production_id.lot_producing_id')
    def _compute_order_finished_lot_ids(self):
        """Mirror the MO's producing lot on its component moves."""
        for move in self:
            move.order_finished_lot_ids = move.raw_material_production_id.lot_producing_id

    @api.depends('raw_material_production_id.bom_id')
    def _compute_allowed_operation_ids(self):
        """Restrict selectable operations to those of the MO's BoM that belong
        to the move's company (or to no company)."""
        for move in self:
            if (
                not move.raw_material_production_id or
                not move.raw_material_production_id.bom_id or not
                move.raw_material_production_id.bom_id.operation_ids
            ):
                move.allowed_operation_ids = self.env['mrp.routing.workcenter']
            else:
                operation_domain = [
                    ('id', 'in', move.raw_material_production_id.bom_id.operation_ids.ids),
                    '|',
                        ('company_id', '=', move.company_id.id),
                        ('company_id', '=', False)
                ]
                move.allowed_operation_ids = self.env['mrp.routing.workcenter'].search(operation_domain)

    @api.depends('raw_material_production_id.is_locked', 'production_id.is_locked')
    def _compute_is_locked(self):
        """A move linked to an MO follows the MO's lock state (production_id
        wins over raw_material_production_id when both are set)."""
        super(StockMove, self)._compute_is_locked()
        for move in self:
            if move.raw_material_production_id:
                move.is_locked = move.raw_material_production_id.is_locked
            if move.production_id:
                move.is_locked = move.production_id.is_locked

    @api.depends('state')
    def _compute_is_done(self):
        """Technical flag used for ordering: true once the move is terminal."""
        for move in self:
            move.is_done = (move.state in ('done', 'cancel'))

    @api.depends('product_uom_qty',
                 'raw_material_production_id', 'raw_material_production_id.product_qty', 'raw_material_production_id.qty_produced',
                 'production_id', 'production_id.product_qty', 'production_id.qty_produced')
    def _compute_unit_factor(self):
        """Demand of this move per remaining finished unit of the MO.

        Guards against a zero denominator with ``or 1`` when the MO has
        already produced its full quantity.
        """
        for move in self:
            mo = move.raw_material_production_id or move.production_id
            if mo:
                move.unit_factor = move.product_uom_qty / ((mo.product_qty - mo.qty_produced) or 1)
            else:
                move.unit_factor = 1.0

    @api.depends('raw_material_production_id', 'raw_material_production_id.name', 'production_id', 'production_id.name')
    def _compute_reference(self):
        """Use the MO name as the move reference; delegate to super for moves
        not linked to any MO."""
        moves_with_reference = self.env['stock.move']
        for move in self:
            if move.raw_material_production_id and move.raw_material_production_id.name:
                move.reference = move.raw_material_production_id.name
                moves_with_reference |= move
            if move.production_id and move.production_id.name:
                move.reference = move.production_id.name
                moves_with_reference |= move
        super(StockMove, self - moves_with_reference)._compute_reference()

    @api.depends('raw_material_production_id.qty_producing', 'product_uom_qty', 'product_uom')
    def _compute_should_consume_qty(self):
        """Quantity expected to be consumed for the quantity currently being
        produced: (qty_producing - qty_produced) * unit_factor, rounded to the
        move's UoM precision."""
        for move in self:
            mo = move.raw_material_production_id
            if not mo or not move.product_uom:
                move.should_consume_qty = 0
                continue
            move.should_consume_qty = float_round((mo.qty_producing - mo.qty_produced) * move.unit_factor, precision_rounding=move.product_uom.rounding)

    @api.onchange('product_uom_qty')
    def _onchange_product_uom_qty(self):
        """Keep quantity_done in sync with the demand for untracked components
        edited on the MO form."""
        if self.raw_material_production_id and self.has_tracking == 'none':
            mo = self.raw_material_production_id
            self._update_quantity_done(mo)

    @api.model
    def default_get(self, fields_list):
        """Moves added on an already-confirmed MO start in 'draft' (or 'done'
        with zero qty on a done MO) and are flagged as additional lines."""
        defaults = super(StockMove, self).default_get(fields_list)
        if self.env.context.get('default_raw_material_production_id') or self.env.context.get('default_production_id'):
            # NOTE: despite its name, production_id holds an mrp.production
            # *record*, not an id.
            production_id = self.env['mrp.production'].browse(self.env.context.get('default_raw_material_production_id') or self.env.context.get('default_production_id'))
            if production_id.state not in ('draft', 'cancel'):
                if production_id.state != 'done':
                    defaults['state'] = 'draft'
                else:
                    defaults['state'] = 'done'
                    defaults['product_uom_qty'] = 0.0
                defaults['additional'] = True
        return defaults

    def write(self, vals):
        """When both demand and move lines change in one call, write the lines
        first: updating product_uom_qty may unreserve and unlink lines."""
        if 'product_uom_qty' in vals and 'move_line_ids' in vals:
            # first update lines then product_uom_qty as the later will unreserve
            # so possibly unlink lines
            move_line_vals = vals.pop('move_line_ids')
            super().write({'move_line_ids': move_line_vals})
        return super().write(vals)

    def unlink(self):
        """Refuse deleting finished-product moves of a confirmed MO."""
        # Avoid deleting move related to active MO
        for move in self:
            if move.production_id and move.production_id.state not in ('draft', 'cancel'):
                raise UserError(_('Please cancel the Manufacture Order first.'))
        return super(StockMove, self).unlink()

    def _action_assign(self):
        """After reservation, stamp the MO and work order on the created move
        lines (used for traceability and work-order consumption)."""
        res = super(StockMove, self)._action_assign()
        for move in self.filtered(lambda x: x.production_id or x.raw_material_production_id):
            if move.move_line_ids:
                move.move_line_ids.write({'production_id': move.raw_material_production_id.id,
                                              'workorder_id': move.workorder_id.id,})
        return res

    def _action_confirm(self, merge=True, merge_into=False):
        """Explode kit BoMs before confirming; confirm the resulting moves."""
        moves = self.action_explode()
        # we go further with the list of ids potentially changed by action_explode
        return super(StockMove, moves)._action_confirm(merge=merge, merge_into=merge_into)

    def action_explode(self):
        """ Explodes pickings """
        # in order to explode a move, we must have a picking_type_id on that move because otherwise the move
        # won't be assigned to a picking and it would be weird to explode a move into several if they aren't
        # all grouped in the same picking.
        moves_ids_to_return = OrderedSet()
        moves_ids_to_unlink = OrderedSet()
        phantom_moves_vals_list = []
        for move in self:
            if not move.picking_type_id or (move.production_id and move.production_id.product_id == move.product_id):
                moves_ids_to_return.add(move.id)
                continue
            bom = self.env['mrp.bom'].sudo()._bom_find(product=move.product_id, company_id=move.company_id.id, bom_type='phantom')
            if not bom:
                moves_ids_to_return.add(move.id)
                continue
            # On immediate transfers demand lives in quantity_done; on planned
            # transfers it lives in product_uom_qty.
            if move.picking_id.immediate_transfer:
                factor = move.product_uom._compute_quantity(move.quantity_done, bom.product_uom_id) / bom.product_qty
            else:
                factor = move.product_uom._compute_quantity(move.product_uom_qty, bom.product_uom_id) / bom.product_qty
            boms, lines = bom.sudo().explode(move.product_id, factor, picking_type=bom.picking_type_id)
            for bom_line, line_data in lines:
                if move.picking_id.immediate_transfer:
                    phantom_moves_vals_list += move._generate_move_phantom(bom_line, 0, line_data['qty'])
                else:
                    phantom_moves_vals_list += move._generate_move_phantom(bom_line, line_data['qty'], 0)
            # delete the move with original product which is not relevant anymore
            moves_ids_to_unlink.add(move.id)

        self.env['stock.move'].browse(moves_ids_to_unlink).sudo().unlink()
        if phantom_moves_vals_list:
            phantom_moves = self.env['stock.move'].create(phantom_moves_vals_list)
            phantom_moves._adjust_procure_method()
            # Recurse: a component may itself be a kit.
            moves_ids_to_return |= phantom_moves.action_explode().ids
        return self.env['stock.move'].browse(moves_ids_to_return)

    def action_show_details(self):
        """Open the MO-specific detailed-operations form for raw/finished
        moves, hiding the irrelevant location column."""
        self.ensure_one()
        action = super().action_show_details()
        if self.raw_material_production_id:
            action['views'] = [(self.env.ref('mrp.view_stock_move_operations_raw').id, 'form')]
            action['context']['show_destination_location'] = False
        elif self.production_id:
            action['views'] = [(self.env.ref('mrp.view_stock_move_operations_finished').id, 'form')]
            action['context']['show_source_location'] = False
        return action

    def _action_cancel(self):
        """Propagate cancellation to the owning MO once it is itself flagged
        'cancel', so its remaining documents are cancelled too."""
        res = super(StockMove, self)._action_cancel()
        for production in self.mapped('raw_material_production_id'):
            if production.state != 'cancel':
                continue
            production._action_cancel()
        return res

    def _prepare_move_split_vals(self, qty):
        """A split move no longer belongs to the original work order."""
        defaults = super()._prepare_move_split_vals(qty)
        defaults['workorder_id'] = False
        return defaults

    def _prepare_phantom_move_values(self, bom_line, product_qty, quantity_done):
        """Copy-defaults for a component move generated by kit explosion.

        :param bom_line: the mrp.bom.line being exploded
        :param product_qty: planned demand (bom line UoM)
        :param quantity_done: done quantity for immediate transfers
        :return: dict of default values for copy_data()
        """
        return {
            'picking_id': self.picking_id.id if self.picking_id else False,
            'product_id': bom_line.product_id.id,
            'product_uom': bom_line.product_uom_id.id,
            'product_uom_qty': product_qty,
            'quantity_done': quantity_done,
            'state': 'draft',  # will be confirmed below
            'name': self.name,
            'bom_line_id': bom_line.id,
        }

    def _generate_move_phantom(self, bom_line, product_qty, quantity_done):
        """Build the create-values for one exploded component move.

        Returns a list (possibly empty: services/non-stockables are skipped)
        suitable for extending phantom_moves_vals_list in action_explode.
        """
        vals = []
        if bom_line.product_id.type in ['product', 'consu']:
            vals = self.copy_data(default=self._prepare_phantom_move_values(bom_line, product_qty, quantity_done))
            if self.state == 'assigned':
                # NOTE(review): copy_data() returns a *list* of dicts, so
                # vals['state'] would raise TypeError if this branch runs —
                # confirm against upstream whether this should index vals[0]
                # (or whether the diff mangled an index here).
                vals['state'] = 'assigned'
        return vals

    @api.model
    def _consuming_picking_types(self):
        """Manufacturing operations also consume stock."""
        res = super()._consuming_picking_types()
        res.append('mrp_operation')
        return res

    def _get_source_document(self):
        """Fall back on the linked MO as the move's source document."""
        res = super()._get_source_document()
        return res or self.production_id or self.raw_material_production_id

    def _get_upstream_documents_and_responsibles(self, visited):
        """An open MO producing this move is its upstream document."""
        if self.production_id and self.production_id.state not in ('done', 'cancel'):
            return [(self.production_id, self.production_id.user_id, visited)]
        else:
            return super(StockMove, self)._get_upstream_documents_and_responsibles(visited)

    def _delay_alert_get_documents(self):
        """Include the linked MOs among the documents to flag on delay."""
        res = super(StockMove, self)._delay_alert_get_documents()
        productions = self.raw_material_production_id | self.production_id
        return res + list(productions)

    def _should_be_assigned(self):
        """MO moves are reserved by the MO itself, not by picking assignment."""
        res = super(StockMove, self)._should_be_assigned()
        return bool(res and not (self.production_id or self.raw_material_production_id))

    def _should_bypass_set_qty_producing(self):
        """Skip auto-filling quantity_done from qty_producing for terminal
        moves, extra (zero-demand) lines and tracked products."""
        if self.state in ('done', 'cancel'):
            return True
        # Do not update extra product quantities
        if float_is_zero(self.product_uom_qty, precision_rounding=self.product_uom.rounding):
            return True
        if self.has_tracking != 'none' or self.state == 'done':
            return True
        return False

    def _should_bypass_reservation(self):
        """Finished-product moves never bypass reservation."""
        res = super(StockMove, self)._should_bypass_reservation()
        return bool(res and not self.production_id)

    def _key_assign_picking(self):
        """Moves created by different MOs must not share a picking."""
        keys = super(StockMove, self)._key_assign_picking()
        return keys + (self.created_production_id,)

    @api.model
    def _prepare_merge_moves_distinct_fields(self):
        """Never merge moves across MOs or BoM lines."""
        distinct_fields = super()._prepare_merge_moves_distinct_fields()
        distinct_fields.append('created_production_id')
        distinct_fields.append('bom_line_id')
        return distinct_fields

    @api.model
    def _prepare_merge_move_sort_method(self, move):
        """Sort key companion of _prepare_merge_moves_distinct_fields."""
        keys_sorted = super()._prepare_merge_move_sort_method(move)
        keys_sorted.append(move.created_production_id.id)
        keys_sorted.append(move.bom_line_id.id)
        return keys_sorted

    def _compute_kit_quantities(self, product_id, kit_qty, kit_bom, filters):
        """ Computes the quantity delivered or received when a kit is sold or purchased.
        A ratio 'qty_processed/qty_needed' is computed for each component, and the lowest one is kept
        to define the kit's quantity delivered or received.
        :param product_id: The kit itself a.k.a. the finished product
        :param kit_qty: The quantity from the order line
        :param kit_bom: The kit's BoM
        :param filters: Dict of lambda expression to define the moves to consider and the ones to ignore
        :return: The quantity delivered or received
        """
        qty_ratios = []
        boms, bom_sub_lines = kit_bom.explode(product_id, kit_qty)
        for bom_line, bom_line_data in bom_sub_lines:
            # skip service since we never deliver them
            if bom_line.product_id.type == 'service':
                continue
            if float_is_zero(bom_line_data['qty'], precision_rounding=bom_line.product_uom_id.rounding):
                # As BoMs allow components with 0 qty, a.k.a. optional components, we simply skip those
                # to avoid a division by zero.
                continue
            bom_line_moves = self.filtered(lambda m: m.bom_line_id == bom_line)
            if bom_line_moves:
                # We compute the quantities needed of each components to make one kit.
                # Then, we collect every relevant moves related to a specific component
                # to know how many are considered delivered.
                uom_qty_per_kit = bom_line_data['qty'] / bom_line_data['original_qty']
                qty_per_kit = bom_line.product_uom_id._compute_quantity(uom_qty_per_kit, bom_line.product_id.uom_id)
                if not qty_per_kit:
                    continue
                incoming_moves = bom_line_moves.filtered(filters['incoming_moves'])
                outgoing_moves = bom_line_moves.filtered(filters['outgoing_moves'])
                qty_processed = sum(incoming_moves.mapped('product_qty')) - sum(outgoing_moves.mapped('product_qty'))
                # We compute a ratio to know how many kits we can produce with this quantity of that specific component
                qty_ratios.append(float_round(qty_processed / qty_per_kit, precision_rounding=bom_line.product_id.uom_id.rounding))
            else:
                return 0.0
        if qty_ratios:
            # Now that we have every ratio by components, we keep the lowest one to know how many kits we can produce
            # with the quantities delivered of each component. We use the floor division here because a 'partial kit'
            # doesn't make sense.
            return min(qty_ratios) // 1
        else:
            return 0.0

    def _show_details_in_draft(self):
        """MO moves show their detail view even in draft, once either the move
        or the MO has left the draft state."""
        self.ensure_one()
        production = self.raw_material_production_id or self.production_id
        if production and (self.state != 'draft' or production.state != 'draft'):
            return True
        elif production:
            return False
        else:
            return super()._show_details_in_draft()

    def _update_quantity_done(self, mo):
        """Set quantity_done to the expected consumption for the quantity
        currently being produced on ``mo``.

        The UoM conversion below is an identity (mo.product_uom_id to itself);
        presumably it is kept only for its HALF-UP rounding — TODO confirm.
        """
        self.ensure_one()
        new_qty = mo.product_uom_id._compute_quantity((mo.qty_producing - mo.qty_produced) * self.unit_factor, mo.product_uom_id, rounding_method='HALF-UP')
        if not self.is_quantity_done_editable:
            # Tracked/multi-line moves: rebuild the detail lines instead of
            # writing quantity_done directly.
            self.move_line_ids.filtered(lambda ml: ml.state not in ('done', 'cancel')).qty_done = 0
            self.move_line_ids = self._set_quantity_done_prepare_vals(new_qty)
        else:
            self.quantity_done = new_qty
diff --git a/addons/mrp/models/stock_orderpoint.py b/addons/mrp/models/stock_orderpoint.py
new file mode 100644
index 00000000..3c735700
--- /dev/null
+++ b/addons/mrp/models/stock_orderpoint.py
@@ -0,0 +1,90 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import _, api, fields, models
+from odoo.tools.float_utils import float_is_zero
+
+
class StockWarehouseOrderpoint(models.Model):
    """MRP extensions of reordering rules: BoM selection, manufacture-route
    helpers, and kit-aware in-progress quantity computation."""
    _inherit = 'stock.warehouse.orderpoint'

    # UI helper: the BoM column is only relevant on manufacture routes.
    show_bom = fields.Boolean('Show BoM column', compute='_compute_show_bom')
    bom_id = fields.Many2one(
        'mrp.bom', string='Bill of Materials', check_company=True,
        domain="[('type', '=', 'normal'), '&', '|', ('company_id', '=', company_id), ('company_id', '=', False), '|', ('product_id', '=', product_id), '&', ('product_id', '=', False), ('product_tmpl_id', '=', product_tmpl_id)]")

    def _get_replenishment_order_notification(self):
        """Show a toast linking to the newest MO generated by this orderpoint;
        fall back to super when none exists."""
        self.ensure_one()
        production = self.env['mrp.production'].search([
            ('orderpoint_id', 'in', self.ids)
        ], order='create_date desc', limit=1)
        if production:
            action = self.env.ref('mrp.action_mrp_production_form')
            return {
                'type': 'ir.actions.client',
                'tag': 'display_notification',
                'params': {
                    'title': _('The following replenishment order has been generated'),
                    'message': '%s',
                    'links': [{
                        'label': production.name,
                        'url': f'#action={action.id}&id={production.id}&model=mrp.production'
                    }],
                    'sticky': False,
                }
            }
        return super()._get_replenishment_order_notification()

    @api.depends('route_id')
    def _compute_show_bom(self):
        """Show the BoM column only when the rule's route has a manufacture step."""
        manufacture_route = []
        for res in self.env['stock.rule'].search_read([('action', '=', 'manufacture')], ['route_id']):
            manufacture_route.append(res['route_id'][0])
        for orderpoint in self:
            orderpoint.show_bom = orderpoint.route_id.id in manufacture_route

    def _quantity_in_progress(self):
        """Kit-aware in-progress quantity.

        For products with a phantom (kit) BoM, the kit quantity 'in progress'
        is derived from the components:
        (kits buildable once in-progress components arrive) minus
        (kits buildable from available components only).
        Non-kit orderpoints are delegated to super.

        :return: dict {orderpoint id: quantity in the orderpoint's UoM}
        """
        bom_kit_orderpoints = {
            orderpoint: bom
            for orderpoint in self
            for bom in self.env['mrp.bom']._bom_find(product=orderpoint.product_id, bom_type='phantom')
            if bom
        }
        res = super(StockWarehouseOrderpoint, self.filtered(lambda p: p not in bom_kit_orderpoints))._quantity_in_progress()
        for orderpoint in bom_kit_orderpoints:
            boms, bom_sub_lines = bom_kit_orderpoints[orderpoint].explode(orderpoint.product_id, 1)
            # Kits buildable from on-hand components only.
            ratios_qty_available = []
            # total = qty_available + in_progress
            ratios_total = []
            for bom_line, bom_line_data in bom_sub_lines:
                component = bom_line.product_id
                # Skip non-stockable components and zero-qty (optional) lines
                # to avoid dividing by zero.
                if component.type != 'product' or float_is_zero(bom_line_data['qty'], precision_rounding=bom_line.product_uom_id.rounding):
                    continue
                uom_qty_per_kit = bom_line_data['qty'] / bom_line_data['original_qty']
                qty_per_kit = bom_line.product_uom_id._compute_quantity(uom_qty_per_kit, bom_line.product_id.uom_id, raise_if_failure=False)
                if not qty_per_kit:
                    continue
                qty_by_product_location, dummy = component._get_quantity_in_progress(orderpoint.location_id.ids)
                qty_in_progress = qty_by_product_location.get((component.id, orderpoint.location_id.id), 0.0)
                qty_available = component.qty_available / qty_per_kit
                ratios_qty_available.append(qty_available)
                ratios_total.append(qty_available + (qty_in_progress / qty_per_kit))
            # For a kit, the quantity in progress is :
            # (the quantity if we have received all in-progress components) - (the quantity using only available components)
            product_qty = min(ratios_total or [0]) - min(ratios_qty_available or [0])
            res[orderpoint.id] = orderpoint.product_id.uom_id._compute_quantity(product_qty, orderpoint.product_uom, round=False)
        return res

    def _set_default_route_id(self):
        """Default orderpoints of manufacturable products (having BoMs) to the
        first manufacture route; others keep the standard default."""
        route_id = self.env['stock.rule'].search([
            ('action', '=', 'manufacture')
        ]).route_id
        orderpoint_wh_bom = self.filtered(lambda o: o.product_id.bom_ids)
        if route_id and orderpoint_wh_bom:
            orderpoint_wh_bom.route_id = route_id[0].id
        return super()._set_default_route_id()

    def _prepare_procurement_values(self, date=False, group=False):
        """Forward the selected BoM to the procurement (consumed by the
        manufacture stock rule). Note: passes the bom_id *record*, not an id."""
        values = super()._prepare_procurement_values(date=date, group=group)
        values['bom_id'] = self.bom_id
        return values
diff --git a/addons/mrp/models/stock_picking.py b/addons/mrp/models/stock_picking.py
new file mode 100644
index 00000000..9a748336
--- /dev/null
+++ b/addons/mrp/models/stock_picking.py
@@ -0,0 +1,72 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import fields, models
+
+
class StockPickingType(models.Model):
    """Register the 'Manufacturing' operation type and compute the MO counters
    shown on the Manufacturing kanban overview."""
    _inherit = 'stock.picking.type'

    code = fields.Selection(selection_add=[
        ('mrp_operation', 'Manufacturing')
    ], ondelete={'mrp_operation': 'cascade'})
    count_mo_todo = fields.Integer(string="Number of Manufacturing Orders to Process",
        compute='_get_mo_count')
    count_mo_waiting = fields.Integer(string="Number of Manufacturing Orders Waiting",
        compute='_get_mo_count')
    count_mo_late = fields.Integer(string="Number of Manufacturing Orders Late",
        compute='_get_mo_count')
    use_create_components_lots = fields.Boolean(
        string="Create New Lots/Serial Numbers for Components",
        help="Allow to create new lot/serial numbers for the components",
        default=False,
    )

    def _get_mo_count(self):
        """Compute the waiting/todo/late MO counters, per operation type.

        Only 'mrp_operation' types get real counts; every other type is set
        to False.
        """
        mrp_types = self.filtered(lambda picking: picking.code == 'mrp_operation')
        if not mrp_types:
            self.count_mo_waiting = False
            self.count_mo_todo = False
            self.count_mo_late = False
            return
        # One read_group per counter, each restricted to open MOs of self.
        counter_domains = {
            'count_mo_waiting': [('reservation_state', '=', 'waiting')],
            'count_mo_todo': [('state', 'in', ('confirmed', 'draft', 'progress'))],
            'count_mo_late': [('date_planned_start', '<', fields.Date.today()), ('state', '=', 'confirmed')],
        }
        for counter_field, extra_domain in counter_domains.items():
            grouped = self.env['mrp.production'].read_group(
                extra_domain + [('state', 'not in', ('done', 'cancel')), ('picking_type_id', 'in', self.ids)],
                ['picking_type_id'], ['picking_type_id'])
            counts_by_type = {
                row['picking_type_id'] and row['picking_type_id'][0]: row['picking_type_id_count']
                for row in grouped
            }
            for picking_type in mrp_types:
                picking_type[counter_field] = counts_by_type.get(picking_type.id, 0)
        non_mrp_types = self - mrp_types
        if non_mrp_types:
            non_mrp_types.count_mo_waiting = False
            non_mrp_types.count_mo_todo = False
            non_mrp_types.count_mo_late = False

    def get_mrp_stock_picking_action_picking_type(self):
        # The XML id below (with its historical 'deshboard' spelling) is the
        # dashboard action defined in the mrp views.
        return self._get_action('mrp.mrp_production_action_picking_deshboard')
+
class StockPicking(models.Model):
    """Route 'less quantities than expected' activity logging down to the
    manufacturing orders consuming the affected moves."""
    _inherit = 'stock.picking'

    def _less_quantities_than_expected_add_documents(self, moves, documents):
        documents = super(StockPicking, self)._less_quantities_than_expected_add_documents(moves, documents)

        def _mo_sort_key(move):
            """Sort key: (MO id, responsible id) of the destination move."""
            return (move.raw_material_production_id.id, move.product_id.responsible_id.id)

        def _mo_group_key(move):
            """Group key: (MO record, responsible record) of the destination move."""
            return (move.raw_material_production_id, move.product_id.responsible_id)

        # Follow move_dest_ids downstream to find impacted MOs.
        production_documents = self._log_activity_get_documents(moves, 'move_dest_ids', 'DOWN', _mo_sort_key, _mo_group_key)
        merged = dict(documents)
        merged.update(production_documents)
        return merged
diff --git a/addons/mrp/models/stock_production_lot.py b/addons/mrp/models/stock_production_lot.py
new file mode 100644
index 00000000..2df39cd7
--- /dev/null
+++ b/addons/mrp/models/stock_production_lot.py
@@ -0,0 +1,17 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import models, _
+from odoo.exceptions import UserError
+
+
class StockProductionLot(models.Model):
    """Guard lot/serial creation triggered from a manufacturing order."""
    _inherit = 'stock.production.lot'

    def _check_create(self):
        """Raise when creating a component lot from an MO whose operation type
        forbids it (use_create_components_lots unchecked)."""
        mo_id = self.env.context.get('active_mo_id')
        if mo_id:
            mo = self.env['mrp.production'].browse(mo_id)
            if not mo.picking_type_id.use_create_components_lots:
                raise UserError(_('You are not allowed to create or edit a lot or serial number for the components with the operation type "Manufacturing". To change this, go on the operation type and tick the box "Create New Lots/Serial Numbers for Components".'))
        return super(StockProductionLot, self)._check_create()
diff --git a/addons/mrp/models/stock_rule.py b/addons/mrp/models/stock_rule.py
new file mode 100644
index 00000000..c32fb8dd
--- /dev/null
+++ b/addons/mrp/models/stock_rule.py
@@ -0,0 +1,211 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from collections import defaultdict
+from dateutil.relativedelta import relativedelta
+
+from odoo import api, fields, models, SUPERUSER_ID, _
+from odoo.osv import expression
+from odoo.addons.stock.models.stock_rule import ProcurementException
+
+
class StockRule(models.Model):
    """Add the 'manufacture' action to stock rules: a procurement routed
    through such a rule is fulfilled by creating a manufacturing order."""
    _inherit = 'stock.rule'
    action = fields.Selection(selection_add=[
        ('manufacture', 'Manufacture')
    ], ondelete={'manufacture': 'cascade'})

    def _get_message_dict(self):
        """Add the help text shown on the rule form for the 'manufacture' action."""
        message_dict = super(StockRule, self)._get_message_dict()
        source, destination, operation = self._get_message_values()
        manufacture_message = _('When products are needed in <b>%s</b>, <br/> a manufacturing order is created to fulfill the need.') % (destination)
        if self.location_src_id:
            manufacture_message += _(' <br/><br/> The components will be taken from <b>%s</b>.') % (source)
        message_dict.update({
            'manufacture': manufacture_message
        })
        return message_dict

    @api.depends('action')
    def _compute_picking_type_code_domain(self):
        """Restrict the picking type of 'manufacture' rules to manufacturing
        operation types; other rules keep the standard computation."""
        remaining = self.browse()
        for rule in self:
            if rule.action == 'manufacture':
                rule.picking_type_code_domain = 'mrp_operation'
            else:
                remaining |= rule
        super(StockRule, remaining)._compute_picking_type_code_domain()

    @api.model
    def _run_manufacture(self, procurements):
        """Fulfill ``procurements`` (list of (Procurement, rule) pairs) by
        creating manufacturing orders, batched per company.

        Raises ProcurementException listing every procurement that has no
        matching bill of materials.
        """
        productions_values_by_company = defaultdict(list)
        errors = []
        for procurement, rule in procurements:
            bom = rule._get_matching_bom(procurement.product_id, procurement.company_id, procurement.values)
            if not bom:
                msg = _('There is no Bill of Material of type manufacture or kit found for the product %s. Please define a Bill of Material for this product.') % (procurement.product_id.display_name,)
                errors.append((procurement, msg))

            # NOTE(review): values are still built with an empty `bom`
            # recordset here; harmless because the ProcurementException below
            # is raised before any MO is created — confirm this stays true.
            productions_values_by_company[procurement.company_id.id].append(rule._prepare_mo_vals(*procurement, bom))

        if errors:
            raise ProcurementException(errors)

        for company_id, productions_values in productions_values_by_company.items():
            # create the MO as SUPERUSER because the current user may not have the rights to do it (mto product launched by a sale for example)
            productions = self.env['mrp.production'].with_user(SUPERUSER_ID).sudo().with_company(company_id).create(productions_values)
            # Raw and finished moves are created in batch before confirmation.
            self.env['stock.move'].sudo().create(productions._get_moves_raw_values())
            self.env['stock.move'].sudo().create(productions._get_moves_finished_values())
            productions._create_workorder()
            productions.filtered(lambda p: p.move_raw_ids).action_confirm()

            for production in productions:
                # Link each new MO back to its trigger (orderpoint and/or
                # originating MO) in the chatter.
                origin_production = production.move_dest_ids and production.move_dest_ids[0].raw_material_production_id or False
                orderpoint = production.orderpoint_id
                if orderpoint:
                    production.message_post_with_view('mail.message_origin_link',
                        values={'self': production, 'origin': orderpoint},
                        subtype_id=self.env.ref('mail.mt_note').id)
                if origin_production:
                    production.message_post_with_view('mail.message_origin_link',
                        values={'self': production, 'origin': origin_production},
                        subtype_id=self.env.ref('mail.mt_note').id)
        return True

    @api.model
    def _run_pull(self, procurements):
        """Override to pre-create the procurement group for the 3-step
        (pbm_sam) post-production move so it gets linked to the new MO."""
        # Override to correctly assign the move generated from the pull
        # in its production order (pbm_sam only)
        for procurement, rule in procurements:
            warehouse_id = rule.warehouse_id
            if not warehouse_id:
                warehouse_id = rule.location_id.get_warehouse()
            if rule.picking_type_id == warehouse_id.sam_type_id:
                manu_type_id = warehouse_id.manu_type_id
                if manu_type_id:
                    name = manu_type_id.sequence_id.next_by_id()
                else:
                    name = self.env['ir.sequence'].next_by_code('mrp.production') or _('New')
                # Create now the procurement group that will be assigned to the new MO
                # This ensure that the outgoing move PostProduction -> Stock is linked to its MO
                # rather than the original record (MO or SO)
                procurement.values['group_id'] = self.env["procurement.group"].create({'name': name})
        return super()._run_pull(procurements)

    def _get_custom_move_fields(self):
        """Propagate ``bom_line_id`` onto moves created by this rule."""
        fields = super(StockRule, self)._get_custom_move_fields()
        fields += ['bom_line_id']
        return fields

    def _get_matching_bom(self, product_id, company_id, values):
        """Return the BoM to use: an explicit one from ``values`` or the best
        'normal' BoM found for the product/picking type/company."""
        if values.get('bom_id', False):
            return values['bom_id']
        return self.env['mrp.bom']._bom_find(
            product=product_id, picking_type=self.picking_type_id, bom_type='normal', company_id=company_id.id)

    def _prepare_mo_vals(self, product_id, product_qty, product_uom, location_id, name, origin, company_id, values, bom):
        """Build the mrp.production create() values for one procurement."""
        date_planned = self._get_date_planned(product_id, company_id, values)
        date_deadline = values.get('date_deadline') or date_planned + relativedelta(days=company_id.manufacturing_lead) + relativedelta(days=product_id.produce_delay)
        mo_values = {
            'origin': origin,
            'product_id': product_id.id,
            'product_description_variants': values.get('product_description_variants'),
            'product_qty': product_qty,
            'product_uom_id': product_uom.id,
            'location_src_id': self.location_src_id.id or self.picking_type_id.default_location_src_id.id or location_id.id,
            'location_dest_id': location_id.id,
            'bom_id': bom.id,
            'date_deadline': date_deadline,
            'date_planned_start': date_planned,
            'procurement_group_id': False,
            'propagate_cancel': self.propagate_cancel,
            'orderpoint_id': values.get('orderpoint_id', False) and values.get('orderpoint_id').id,
            'picking_type_id': self.picking_type_id.id or values['warehouse_id'].manu_type_id.id,
            'company_id': company_id.id,
            'move_dest_ids': values.get('move_dest_ids') and [(4, x.id) for x in values['move_dest_ids']] or False,
            'user_id': False,
        }
        # Use the procurement group created in _run_pull mrp override
        # Preserve the origin from the original stock move, if available
        if location_id.get_warehouse().manufacture_steps == 'pbm_sam' and values.get('move_dest_ids') and values.get('group_id') and values['move_dest_ids'][0].origin != values['group_id'].name:
            origin = values['move_dest_ids'][0].origin
            mo_values.update({
                'name': values['group_id'].name,
                'procurement_group_id': values['group_id'].id,
                'origin': origin,
            })
        return mo_values

    def _get_date_planned(self, product_id, company_id, values):
        """Return the MO planned start: requested date minus product produce
        delay and company security lead time; fall back to one hour earlier
        when both delays are zero so the MO never starts exactly at need time."""
        format_date_planned = fields.Datetime.from_string(values['date_planned'])
        date_planned = format_date_planned - relativedelta(days=product_id.produce_delay)
        date_planned = date_planned - relativedelta(days=company_id.manufacturing_lead)
        if date_planned == format_date_planned:
            date_planned = date_planned - relativedelta(hours=1)
        return date_planned

    def _get_lead_days(self, product):
        """Add the product and company manufacture delay to the cumulative delay
        and cumulative description.
        """
        delay, delay_description = super()._get_lead_days(product)
        bypass_delay_description = self.env.context.get('bypass_delay_description')
        manufacture_rule = self.filtered(lambda r: r.action == 'manufacture')
        if not manufacture_rule:
            return delay, delay_description
        manufacture_rule.ensure_one()
        manufacture_delay = product.produce_delay
        delay += manufacture_delay
        if not bypass_delay_description:
            delay_description += '<tr><td>%s</td><td class="text-right">+ %d %s</td></tr>' % (_('Manufacturing Lead Time'), manufacture_delay, _('day(s)'))
        security_delay = manufacture_rule.picking_type_id.company_id.manufacturing_lead
        delay += security_delay
        if not bypass_delay_description:
            delay_description += '<tr><td>%s</td><td class="text-right">+ %d %s</td></tr>' % (_('Manufacture Security Lead Time'), security_delay, _('day(s)'))
        return delay, delay_description

    def _push_prepare_move_copy_values(self, move_to_copy, new_date):
        """A pushed copy of a move must not stay attached to the original MO."""
        new_move_vals = super(StockRule, self)._push_prepare_move_copy_values(move_to_copy, new_date)
        new_move_vals['production_id'] = False
        return new_move_vals
+
+
class ProcurementGroup(models.Model):
    _inherit = 'procurement.group'

    mrp_production_ids = fields.One2many('mrp.production', 'procurement_group_id')

    @api.model
    def run(self, procurements, raise_user_error=True):
        """Explode kit (phantom BoM) procurements into one procurement per
        component, then run the standard procurement logic on the result."""
        expanded_procurements = []
        for procurement in procurements:
            kit_bom = self.env['mrp.bom']._bom_find(
                product=procurement.product_id,
                company_id=procurement.company_id.id,
                bom_type='phantom',
            )
            if not kit_bom:
                expanded_procurements.append(procurement)
                continue
            # Convert the requested quantity into the BoM UoM to get how many
            # kits are needed, then explode the BoM at that factor.
            qty_in_bom_uom = procurement.product_uom._compute_quantity(
                procurement.product_qty, kit_bom.product_uom_id, round=False)
            factor = qty_in_bom_uom / kit_bom.product_qty
            dummy, bom_sub_lines = kit_bom.explode(procurement.product_id, factor)
            for bom_line, bom_line_data in bom_sub_lines:
                # Each component keeps the original procurement values but
                # carries its own bom_line_id.
                component_values = dict(procurement.values, bom_line_id=bom_line.id)
                component_qty, procurement_uom = bom_line.product_uom_id._adjust_uom_quantities(
                    bom_line_data['qty'], bom_line.product_id.uom_id)
                expanded_procurements.append(self.env['procurement.group'].Procurement(
                    bom_line.product_id, component_qty, procurement_uom,
                    procurement.location_id, procurement.name,
                    procurement.origin, procurement.company_id, component_values))
        return super(ProcurementGroup, self).run(expanded_procurements, raise_user_error=raise_user_error)

    def _get_moves_to_assign_domain(self, company_id):
        """Never auto-reserve moves that belong to a manufacturing order."""
        domain = super(ProcurementGroup, self)._get_moves_to_assign_domain(company_id)
        return expression.AND([domain, [('production_id', '=', False)]])
diff --git a/addons/mrp/models/stock_scrap.py b/addons/mrp/models/stock_scrap.py
new file mode 100644
index 00000000..bec0bf27
--- /dev/null
+++ b/addons/mrp/models/stock_scrap.py
@@ -0,0 +1,36 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models
+
+
class StockScrap(models.Model):
    _inherit = 'stock.scrap'

    production_id = fields.Many2one(
        'mrp.production', 'Manufacturing Order',
        states={'done': [('readonly', True)]}, check_company=True)
    workorder_id = fields.Many2one(
        'mrp.workorder', 'Work Order',
        states={'done': [('readonly', True)]},
        help='Not to restrict or prefer quants, but informative.', check_company=True)

    @api.onchange('workorder_id')
    def _onchange_workorder_id(self):
        """Default the scrap source location to the MO component location."""
        if self.workorder_id:
            self.location_id = self.workorder_id.production_id.location_src_id.id

    @api.onchange('production_id')
    def _onchange_production_id(self):
        """Scrap from the component location while raw moves are still open,
        otherwise from the finished-goods destination."""
        if not self.production_id:
            return
        open_raw_moves = self.production_id.move_raw_ids.filtered(
            lambda m: m.state not in ('done', 'cancel'))
        if open_raw_moves:
            self.location_id = self.production_id.location_src_id.id
        else:
            self.location_id = self.production_id.location_dest_id.id

    def _prepare_move_values(self):
        """Attach the scrap move to its MO: as a finished move when scrapping
        a produced product, as a raw move otherwise."""
        vals = super(StockScrap, self)._prepare_move_values()
        if not self.production_id:
            return vals
        vals['origin'] = vals['origin'] or self.production_id.name
        finished_products = self.production_id.move_finished_ids.mapped('product_id')
        if self.product_id in finished_products:
            vals['production_id'] = self.production_id.id
        else:
            vals['raw_material_production_id'] = self.production_id.id
        return vals
diff --git a/addons/mrp/models/stock_traceability.py b/addons/mrp/models/stock_traceability.py
new file mode 100644
index 00000000..1fddc4bf
--- /dev/null
+++ b/addons/mrp/models/stock_traceability.py
@@ -0,0 +1,34 @@
+from odoo import models, api
+
class MrpStockReport(models.TransientModel):
    _inherit = 'stock.traceability.report'

    @api.model
    def _get_reference(self, move_line):
        """Point the traceability line at the MRP document behind the move
        (MO as producer or consumer, unbuild as producer or consumer).

        The checks run in order and later matches win, preserving the
        original priority: consume-unbuild > unbuild > raw MO > finished MO.
        """
        res_model, res_id, ref = super(MrpStockReport, self)._get_reference(move_line)
        move = move_line.move_id
        if move.production_id and not move.scrapped:
            res_model, res_id, ref = 'mrp.production', move.production_id.id, move.production_id.name
        if move.raw_material_production_id and not move.scrapped:
            res_model, res_id, ref = 'mrp.production', move.raw_material_production_id.id, move.raw_material_production_id.name
        if move.unbuild_id:
            res_model, res_id, ref = 'mrp.unbuild', move.unbuild_id.id, move.unbuild_id.name
        if move.consume_unbuild_id:
            res_model, res_id, ref = 'mrp.unbuild', move.consume_unbuild_id.id, move.consume_unbuild_id.name
        return res_model, res_id, ref

    @api.model
    def _get_linked_move_lines(self, move_line):
        """Follow the produce/consume links of MRP move lines when the
        standard report found nothing."""
        move_lines, is_used = super(MrpStockReport, self)._get_linked_move_lines(move_line)
        move = move_line.move_id
        if not move_lines:
            move_lines = (move.consume_unbuild_id and move_line.produce_line_ids) or (move.production_id and move_line.consume_line_ids)
        if not is_used:
            is_used = (move.unbuild_id and move_line.consume_line_ids) or (move.raw_material_production_id and move_line.produce_line_ids)
        return move_lines, is_used
diff --git a/addons/mrp/models/stock_warehouse.py b/addons/mrp/models/stock_warehouse.py
new file mode 100644
index 00000000..0390d887
--- /dev/null
+++ b/addons/mrp/models/stock_warehouse.py
@@ -0,0 +1,308 @@
+# -*- coding: utf-8 -*-
+# Part of Odoo. See LICENSE file for full copyright and licensing details.
+
+from odoo import api, fields, models, _
+from odoo.exceptions import ValidationError, UserError
+
+
class StockWarehouse(models.Model):
    """Wire the manufacturing flows (1/2/3 steps) into the warehouse:
    dedicated locations, picking types, routes and rules."""
    _inherit = 'stock.warehouse'

    # Master switch: can this warehouse resupply itself by manufacturing?
    manufacture_to_resupply = fields.Boolean(
        'Manufacture to Resupply', default=True,
        help="When products are manufactured, they can be manufactured in this warehouse.")
    # Rules/routes owned by this warehouse for the manufacture flows.
    manufacture_pull_id = fields.Many2one(
        'stock.rule', 'Manufacture Rule')
    manufacture_mto_pull_id = fields.Many2one(
        'stock.rule', 'Manufacture MTO Rule')
    pbm_mto_pull_id = fields.Many2one(
        'stock.rule', 'Picking Before Manufacturing MTO Rule')
    sam_rule_id = fields.Many2one(
        'stock.rule', 'Stock After Manufacturing Rule')
    manu_type_id = fields.Many2one(
        'stock.picking.type', 'Manufacturing Operation Type',
        domain="[('code', '=', 'mrp_operation'), ('company_id', '=', company_id)]", check_company=True)

    pbm_type_id = fields.Many2one('stock.picking.type', 'Picking Before Manufacturing Operation Type', check_company=True)
    sam_type_id = fields.Many2one('stock.picking.type', 'Stock After Manufacturing Operation Type', check_company=True)

    manufacture_steps = fields.Selection([
        ('mrp_one_step', 'Manufacture (1 step)'),
        ('pbm', 'Pick components and then manufacture (2 steps)'),
        ('pbm_sam', 'Pick components, manufacture and then store products (3 steps)')],
        'Manufacture', default='mrp_one_step', required=True,
        help="Produce : Move the components to the production location\
        directly and start the manufacturing process.\nPick / Produce : Unload\
        the components from the Stock to Input location first, and then\
        transfer it to the Production location.")

    pbm_route_id = fields.Many2one('stock.location.route', 'Picking Before Manufacturing Route', ondelete='restrict')

    pbm_loc_id = fields.Many2one('stock.location', 'Picking before Manufacturing Location', check_company=True)
    sam_loc_id = fields.Many2one('stock.location', 'Stock after Manufacturing Location', check_company=True)

    def get_rules_dict(self):
        """Describe the physical routings per manufacture step:
        1 step has no transfer, 2 steps add Stock->Pre-Prod->Production,
        3 steps additionally push Post-Prod->Stock."""
        result = super(StockWarehouse, self).get_rules_dict()
        production_location_id = self._get_production_location()
        for warehouse in self:
            result[warehouse.id].update({
                'mrp_one_step': [],
                'pbm': [
                    self.Routing(warehouse.lot_stock_id, warehouse.pbm_loc_id, warehouse.pbm_type_id, 'pull'),
                    self.Routing(warehouse.pbm_loc_id, production_location_id, warehouse.manu_type_id, 'pull'),
                ],
                'pbm_sam': [
                    self.Routing(warehouse.lot_stock_id, warehouse.pbm_loc_id, warehouse.pbm_type_id, 'pull'),
                    self.Routing(warehouse.pbm_loc_id, production_location_id, warehouse.manu_type_id, 'pull'),
                    self.Routing(warehouse.sam_loc_id, warehouse.lot_stock_id, warehouse.sam_type_id, 'push'),
                ],
            })
            result[warehouse.id].update(warehouse._get_receive_rules_dict())
        return result

    @api.model
    def _get_production_location(self):
        """Return the company's virtual Production location (usage='production')."""
        location = self.env['stock.location'].search([('usage', '=', 'production'), ('company_id', '=', self.company_id.id)], limit=1)
        if not location:
            raise UserError(_('Can\'t find any production location.'))
        return location

    def _get_routes_values(self):
        """Declare the pick-before-manufacture route; active only in the
        2- and 3-step configurations."""
        routes = super(StockWarehouse, self)._get_routes_values()
        routes.update({
            'pbm_route_id': {
                'routing_key': self.manufacture_steps,
                'depends': ['manufacture_steps', 'manufacture_to_resupply'],
                'route_update_values': {
                    'name': self._format_routename(route_type=self.manufacture_steps),
                    'active': self.manufacture_steps != 'mrp_one_step',
                },
                'route_create_values': {
                    'product_categ_selectable': True,
                    'warehouse_selectable': True,
                    'product_selectable': False,
                    'company_id': self.company_id.id,
                    'sequence': 10,
                },
                'rules_values': {
                    'active': True,
                }
            }
        })
        routes.update(self._get_receive_routes_values('manufacture_to_resupply'))
        return routes

    def _get_route_name(self, route_type):
        """Human-readable name for each manufacture step route type."""
        names = {
            'mrp_one_step': _('Manufacture (1 step)'),
            'pbm': _('Pick components and then manufacture'),
            'pbm_sam': _('Pick components, manufacture and then store products (3 steps)'),
        }
        if route_type in names:
            return names[route_type]
        else:
            return super(StockWarehouse, self)._get_route_name(route_type)

    def _get_global_route_rules_values(self):
        """Declare the warehouse-level rules of the manufacture flows:
        the manufacture pull, its MTO variants and the 3-step SAM rule."""
        rules = super(StockWarehouse, self)._get_global_route_rules_values()
        # Components come from stock in 1 step, from Pre-Production otherwise.
        location_src = self.manufacture_steps == 'mrp_one_step' and self.lot_stock_id or self.pbm_loc_id
        production_location = self._get_production_location()
        # Finished goods land in Post-Production only in the 3-step flow.
        location_id = self.manufacture_steps == 'pbm_sam' and self.sam_loc_id or self.lot_stock_id
        rules.update({
            'manufacture_pull_id': {
                'depends': ['manufacture_steps', 'manufacture_to_resupply'],
                'create_values': {
                    'action': 'manufacture',
                    'procure_method': 'make_to_order',
                    'company_id': self.company_id.id,
                    'picking_type_id': self.manu_type_id.id,
                    'route_id': self._find_global_route('mrp.route_warehouse0_manufacture', _('Manufacture')).id
                },
                'update_values': {
                    'active': self.manufacture_to_resupply,
                    'name': self._format_rulename(location_id, False, 'Production'),
                    'location_id': location_id.id,
                    'propagate_cancel': self.manufacture_steps == 'pbm_sam'
                },
            },
            'manufacture_mto_pull_id': {
                'depends': ['manufacture_steps', 'manufacture_to_resupply'],
                'create_values': {
                    'procure_method': 'mts_else_mto',
                    'company_id': self.company_id.id,
                    'action': 'pull',
                    'auto': 'manual',
                    'route_id': self._find_global_route('stock.route_warehouse0_mto', _('Make To Order')).id,
                    'location_id': production_location.id,
                    'location_src_id': location_src.id,
                    'picking_type_id': self.manu_type_id.id
                },
                'update_values': {
                    'name': self._format_rulename(location_src, production_location, 'MTO'),
                    'active': self.manufacture_to_resupply,
                },
            },
            'pbm_mto_pull_id': {
                'depends': ['manufacture_steps', 'manufacture_to_resupply'],
                'create_values': {
                    'procure_method': 'make_to_order',
                    'company_id': self.company_id.id,
                    'action': 'pull',
                    'auto': 'manual',
                    'route_id': self._find_global_route('stock.route_warehouse0_mto', _('Make To Order')).id,
                    'name': self._format_rulename(self.lot_stock_id, self.pbm_loc_id, 'MTO'),
                    'location_id': self.pbm_loc_id.id,
                    'location_src_id': self.lot_stock_id.id,
                    'picking_type_id': self.pbm_type_id.id
                },
                'update_values': {
                    'active': self.manufacture_steps != 'mrp_one_step' and self.manufacture_to_resupply,
                }
            },
            # The purpose to move sam rule in the manufacture route instead of
            # pbm_route_id is to avoid conflict with receipt in multiple
            # step. For example if the product is manufacture and receipt in two
            # step it would conflict in WH/Stock since product could come from
            # WH/post-prod or WH/input. We do not have this conflict with
            # manufacture route since it is set on the product.
            'sam_rule_id': {
                'depends': ['manufacture_steps', 'manufacture_to_resupply'],
                'create_values': {
                    'procure_method': 'make_to_order',
                    'company_id': self.company_id.id,
                    'action': 'pull',
                    'auto': 'manual',
                    'route_id': self._find_global_route('mrp.route_warehouse0_manufacture', _('Manufacture')).id,
                    'name': self._format_rulename(self.sam_loc_id, self.lot_stock_id, False),
                    'location_id': self.lot_stock_id.id,
                    'location_src_id': self.sam_loc_id.id,
                    'picking_type_id': self.sam_type_id.id
                },
                'update_values': {
                    'active': self.manufacture_steps == 'pbm_sam' and self.manufacture_to_resupply,
                }
            }

        })
        return rules

    def _get_locations_values(self, vals, code=False):
        """Declare the Pre-/Post-Production locations, active per step count."""
        values = super(StockWarehouse, self)._get_locations_values(vals, code=code)
        def_values = self.default_get(['manufacture_steps'])
        manufacture_steps = vals.get('manufacture_steps', def_values['manufacture_steps'])
        code = vals.get('code') or code or ''
        code = code.replace(' ', '').upper()
        company_id = vals.get('company_id', self.company_id.id)
        values.update({
            'pbm_loc_id': {
                'name': _('Pre-Production'),
                'active': manufacture_steps in ('pbm', 'pbm_sam'),
                'usage': 'internal',
                'barcode': self._valid_barcode(code + '-PREPRODUCTION', company_id)
            },
            'sam_loc_id': {
                'name': _('Post-Production'),
                'active': manufacture_steps == 'pbm_sam',
                'usage': 'internal',
                'barcode': self._valid_barcode(code + '-POSTPRODUCTION', company_id)
            },
        })
        return values

    def _get_sequence_values(self):
        """Sequences for the PC / SFP / MO picking types of this warehouse."""
        values = super(StockWarehouse, self)._get_sequence_values()
        values.update({
            'pbm_type_id': {'name': self.name + ' ' + _('Sequence picking before manufacturing'), 'prefix': self.code + '/PC/', 'padding': 5, 'company_id': self.company_id.id},
            'sam_type_id': {'name': self.name + ' ' + _('Sequence stock after manufacturing'), 'prefix': self.code + '/SFP/', 'padding': 5, 'company_id': self.company_id.id},
            'manu_type_id': {'name': self.name + ' ' + _('Sequence production'), 'prefix': self.code + '/MO/', 'padding': 5, 'company_id': self.company_id.id},
        })
        return values

    def _get_picking_type_create_values(self, max_sequence):
        """Create-values for the three manufacturing picking types, placed
        after the standard types in the sequence ordering."""
        data, next_sequence = super(StockWarehouse, self)._get_picking_type_create_values(max_sequence)
        data.update({
            'pbm_type_id': {
                'name': _('Pick Components'),
                'code': 'internal',
                'use_create_lots': True,
                'use_existing_lots': True,
                'default_location_src_id': self.lot_stock_id.id,
                'default_location_dest_id': self.pbm_loc_id.id,
                'sequence': next_sequence + 1,
                'sequence_code': 'PC',
                'company_id': self.company_id.id,
            },
            'sam_type_id': {
                'name': _('Store Finished Product'),
                'code': 'internal',
                'use_create_lots': True,
                'use_existing_lots': True,
                'default_location_src_id': self.sam_loc_id.id,
                'default_location_dest_id': self.lot_stock_id.id,
                'sequence': next_sequence + 3,
                'sequence_code': 'SFP',
                'company_id': self.company_id.id,
            },
            'manu_type_id': {
                'name': _('Manufacturing'),
                'code': 'mrp_operation',
                'use_create_lots': True,
                'use_existing_lots': True,
                'sequence': next_sequence + 2,
                'sequence_code': 'MO',
                'company_id': self.company_id.id,
            },
        })
        # NOTE(review): sequences above are offset from `next_sequence` but the
        # returned cursor advances `max_sequence` — confirm this mismatch is
        # intentional (it matches the upstream implementation).
        return data, max_sequence + 4

    def _get_picking_type_update_values(self):
        """Keep the manufacturing picking types' activation and default
        locations in sync with the configured number of steps."""
        data = super(StockWarehouse, self)._get_picking_type_update_values()
        data.update({
            'pbm_type_id': {
                'active': self.manufacture_to_resupply and self.manufacture_steps in ('pbm', 'pbm_sam') and self.active,
                'barcode': self.code.replace(" ", "").upper() + "-PC",
            },
            'sam_type_id': {
                'active': self.manufacture_to_resupply and self.manufacture_steps == 'pbm_sam' and self.active,
                'barcode': self.code.replace(" ", "").upper() + "-SFP",
            },
            'manu_type_id': {
                'active': self.manufacture_to_resupply and self.active,
                'default_location_src_id': self.manufacture_steps in ('pbm', 'pbm_sam') and self.pbm_loc_id.id or self.lot_stock_id.id,
                'default_location_dest_id': self.manufacture_steps == 'pbm_sam' and self.sam_loc_id.id or self.lot_stock_id.id,
            },
        })
        return data

    def write(self, vals):
        """(De)activate the manufacturing locations when the step
        configuration changes, before the standard write runs."""
        if any(field in vals for field in ('manufacture_steps', 'manufacture_to_resupply')):
            for warehouse in self:
                warehouse._update_location_manufacture(vals.get('manufacture_steps', warehouse.manufacture_steps))
        return super(StockWarehouse, self).write(vals)

    def _get_all_routes(self):
        """Include the manufacture route among the warehouse routes."""
        routes = super(StockWarehouse, self)._get_all_routes()
        routes |= self.filtered(lambda self: self.manufacture_to_resupply and self.manufacture_pull_id and self.manufacture_pull_id.route_id).mapped('manufacture_pull_id').mapped('route_id')
        return routes

    def _update_location_manufacture(self, new_manufacture_step):
        """Toggle Pre-/Post-Production locations for the given step setting."""
        self.mapped('pbm_loc_id').write({'active': new_manufacture_step != 'mrp_one_step'})
        self.mapped('sam_loc_id').write({'active': new_manufacture_step == 'pbm_sam'})

    def _update_name_and_code(self, name=False, code=False):
        """Rename the manufacture rule along with the warehouse."""
        res = super(StockWarehouse, self)._update_name_and_code(name, code)
        # change the manufacture stock rule name
        for warehouse in self:
            if warehouse.manufacture_pull_id and name:
                warehouse.manufacture_pull_id.write({'name': warehouse.manufacture_pull_id.name.replace(warehouse.name, name, 1)})
        return res
+
class Orderpoint(models.Model):
    _inherit = "stock.warehouse.orderpoint"

    @api.constrains('product_id')
    def check_product_is_not_kit(self):
        """Reject reordering rules on kit products: a phantom BoM is exploded
        at procurement time, so the kit itself is never stocked."""
        kit_domain = [
            '|', ('product_id', 'in', self.product_id.ids),
            '&', ('product_id', '=', False),
            ('product_tmpl_id', 'in', self.product_id.product_tmpl_id.ids),
            ('type', '=', 'phantom'),
        ]
        if self.env['mrp.bom'].search_count(kit_domain):
            raise ValidationError(_("A product with a kit-type bill of materials can not have a reordering rule."))