BAH-4151 | Fix: Double calculation of inventory adjustment entries in opening stock (#201)
mohan-13 authored Feb 21, 2025
1 parent 1ca99fd commit 878770e
Showing 1 changed file with 8 additions and 0 deletions.
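
The same fix is applied in eight places: every sum over internal ('INT') transfer lines in print_report now carries an extra domain leaf that excludes move lines whose counterpart location is the virtual 'Inventory adjustment' location, so adjustment entries are no longer added to opening stock a second time on top of the adjustment figures. Below is a minimal sketch of the corrected 'Internal Inward' pattern, assuming an Odoo environment; the helper name and signature are illustrative and not part of the commit.

def internal_inward_qty(env, product, location, from_date):
    # Sum qty_done over internal inward transfer lines completed before the
    # report start date. The ('location_id.name', '!=', 'Inventory adjustment')
    # leaf is the one this commit adds: lines sourced from the virtual
    # adjustment location are left out so they are not double counted.
    lines = env['stock.move.line'].search([
        ('product_id', '=', product.id),
        ('date', '<', from_date),
        ('location_dest_id', '=', location.id),
        ('location_id.name', '!=', 'Inventory adjustment'),
        ('picking_id.picking_type_id.sequence_code', '=', 'INT'),
        ('move_id.state', '=', 'done'),
    ])
    return sum(lines.mapped('qty_done'))

The outward sums get the mirror-image leaf ('location_dest_id.name', '!=', 'Inventory adjustment'), since there the adjustment location appears as the destination.
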
bahmni_reports/report/stock_report.py (8 additions, 0 deletions)
@@ -106,6 +106,7 @@ def print_report(self):
 ('move_id.state','=','done')])]) + sum([int_in.qty_done for int_in in stock_move_line.search([\
 ('product_id', '=', drug.id),('date', '<', rec.from_date),
 ('location_dest_id', '=', rec.location_id.id),
+('location_id.name','!=', 'Inventory adjustment'),
 ('picking_id.picking_type_id.sequence_code','=', 'INT'), ##Internal Inward
 ('move_id.state','=','done')])]) +
 sum([po.qty_done for po in stock_move_line.search([('product_id', '=', drug.id),\
@@ -125,6 +126,7 @@ def print_report(self):
 )-(sum([int_out.qty_done for int_out in stock_move_line.search([\
 ('product_id', '=', drug.id),('date', '<', rec.from_date),
 ('location_id', '=', rec.location_id.id),
+('location_dest_id.name','!=', 'Inventory adjustment'),
 ('picking_id.picking_type_id.sequence_code','=', 'INT'), ##Internal Outward
 ('move_id.state','=','done')])]) + sum([issue.qty_done for issue in stock_move_line.search([\
 ('product_id', '=', drug.id),('date', '<', rec.from_date),
@@ -149,6 +151,7 @@ def print_report(self):
 for int_in in stock_move_line.search([\
 ('product_id', '=', drug.id),('date', '<', rec.from_date),
 ('location_dest_id', '=', rec.location_id.id),
+('location_id.name','!=', 'Inventory adjustment'),
 ('picking_id.picking_type_id.sequence_code','=', 'INT'),
 ('move_id.state','=','done')])])
 + ##Customer Return
@@ -183,6 +186,7 @@ def print_report(self):
 for int_out in stock_move_line.search([\
 ('product_id', '=', drug.id),('date', '<', rec.from_date),
 ('location_id', '=', rec.location_id.id),
+('location_dest_id.name','!=', 'Inventory adjustment'),
 ('picking_id.picking_type_id.sequence_code','=', 'INT'),
 ('move_id.state','=','done')])])
 + ## Issue
@@ -309,6 +313,7 @@ def print_report(self):
 ('move_id.state','=','done')]) if po.date < days]) + sum([int_in.qty_done\
 for int_in in stock_move_line.search([('product_id', '=', rec.drug_ids.id),
 ('location_dest_id', '=', rec.location_id.id),
+('location_id.name','!=', 'Inventory adjustment'),
 ('picking_id.picking_type_id.sequence_code','=', 'INT'), ##Internal Inward
 ('move_id.state','=','done')]) if int_in.date < days]) +
 sum([po.qty_done for po in stock_move_line.search([('product_id', '=', rec.drug_ids.id),
@@ -325,6 +330,7 @@ def print_report(self):
 ) - (sum([int_out.qty_done\
 for int_out in stock_move_line.search([('product_id', '=', rec.drug_ids.id),
 ('location_id', '=', rec.location_id.id),
+('location_dest_id.name','!=', 'Inventory adjustment'),
 ('picking_id.picking_type_id.sequence_code','=', 'INT'), ##Internal Outward
 ('move_id.state','=','done')]) if int_out.date < days]) + sum([issue.qty_done\
 for issue in stock_move_line.search([('product_id', '=', rec.drug_ids.id),\
@@ -348,6 +354,7 @@ def print_report(self):
 ('product_id', '=', rec.drug_ids.id), ('state', '=', 'purchase')], order='id desc',limit=1)]))
 for int_in in stock_move_line.search([('product_id', '=', rec.drug_ids.id),
 ('location_dest_id', '=', rec.location_id.id),
+('location_id.name','!=', 'Inventory adjustment'),
 ('picking_id.picking_type_id.sequence_code','=', 'INT'), ('move_id.state','=','done')])\
 if int_in.date < days])

@@ -385,6 +392,7 @@ def print_report(self):
 for int_out in stock_move_line.search([\
 ('product_id', '=', rec.drug_ids.id),
 ('location_id', '=', rec.location_id.id),
+('location_dest_id.name','!=', 'Inventory adjustment'),
 ('picking_id.picking_type_id.sequence_code','=', 'INT'),
 ('move_id.state','=','done')]) if int_out.date < days])
 + ## Issue