From 5df3de7dcd0deee52f2b5e80aa1f6c21dff28ca3 Mon Sep 17 00:00:00 2001
From: mgw <1392924357@qq.com>
Date: Fri, 23 Aug 2024 10:05:17 +0800
Subject: [PATCH] Add rework counts and related dashboard metrics
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 sf_machine_connect/controllers/controllers.py | 122 +++++++++++++++++-
 sf_manufacturing/models/mrp_workorder.py      |   3 +
 2 files changed, 123 insertions(+), 2 deletions(-)

diff --git a/sf_machine_connect/controllers/controllers.py b/sf_machine_connect/controllers/controllers.py
index 51a5bd92..a40b148d 100644
--- a/sf_machine_connect/controllers/controllers.py
+++ b/sf_machine_connect/controllers/controllers.py
@@ -4,10 +4,20 @@ import ast
 import json
 import base64
 import logging
+import psycopg2
 from datetime import datetime, timedelta
 from odoo import http
 from odoo.http import request
 
+# Connection settings for the external time-series PostgreSQL database
+db_config = {
+    "database": "timeseries_db",
+    "user": "postgres",
+    "password": "postgres",
+    "port": "5432",
+    "host": "172.16.10.98"
+}
+
 
 def convert_to_seconds(time_str):
     # Adjusted regex so that the H, M and S parts are each optional
@@ -152,7 +162,7 @@ class Sf_Dashboard_Connect(http.Controller):
                 'first_online_duration': first_online_duration,
                 # Downtime: shutdown time - running time
                 # Downtime duration: shutdown time - first-online time
-                'img': base64.b64encode(machine_data.machine_tool_picture).decode('utf-8'),
+                'img': f'data:image/png;base64,{machine_data.machine_tool_picture.decode("utf-8")}',
             })
         return json.dumps(res)
 
@@ -341,6 +351,12 @@ class Sf_Dashboard_Connect(http.Controller):
             # Cumulative defective work orders
             plan_data_fault_counts = plan_obj.search_count(
                 [('production_line_id.name', '=', line), ('production_id.state', 'in', ['scrap', 'cancel'])])
+
+            # Reworked work order count
+
+            plan_data_rework_counts = plan_obj.search_count(
+                [('production_line_id.name', '=', line), ('production_id.state', 'in', ['rework'])])
+
             # Work order completion rate
             finishe_rate = round(
                 (plan_data_finish_counts / plan_data_total_counts if plan_data_total_counts > 0 else 0), 3)
@@ -356,6 +372,7 @@ class Sf_Dashboard_Connect(http.Controller):
                 'plan_data_fault_counts': plan_data_fault_counts,
                 'finishe_rate': finishe_rate,
                 'plan_data_progress_deviation': plan_data_progress_deviation,
+                'plan_data_rework_counts': plan_data_rework_counts
             }
             res['data'][line] = data
 
@@ -405,9 +422,22 @@ class Sf_Dashboard_Connect(http.Controller):
                     (date_field_name, '>=', date.strftime('%Y-%m-%d 00:00:00')),
                     (date_field_name, '<', next_day.strftime('%Y-%m-%d 00:00:00'))
                 ])
+
+                rework_orders = plan_obj.search(
+                    [('production_line_id.name', '=', line), ('state', 'in', ['rework']),
+                     (date_field_name, '>=', date.strftime('%Y-%m-%d 00:00:00')),
+                     (date_field_name, '<', next_day.strftime('%Y-%m-%d 00:00:00'))
+                     ])
+                not_passed_orders = plan_obj.search(
+                    [('production_line_id.name', '=', line), ('state', 'in', ['scrap', 'cancel']),
+                     (date_field_name, '>=', date.strftime('%Y-%m-%d 00:00:00')),
+                     (date_field_name, '<', next_day.strftime('%Y-%m-%d 00:00:00'))
+                     ])
                 order_counts.append({
                     'date': date.strftime('%Y-%m-%d'),
-                    'order_count': len(orders)
+                    'order_count': len(orders),
+                    'rework_orders': len(rework_orders),
+                    'not_passed_orders': len(not_passed_orders)
                 })
             # Wrap everything in an outer layer, much like adding an extra div, so the different series can be told apart
             # The outer layer also bundles several data structures together, which makes them easier for the front end to handle
@@ -590,3 +620,91 @@ class Sf_Dashboard_Connect(http.Controller):
             # Build the wrapper
             res['data'][line] = {'not_done_data': not_done_data, 'done_data': done_data}
         return json.dumps(res)
+
+    # Query the PostgreSQL time-series database for idle and alarm statistics
+    @http.route('/api/IdleAlarmCount', type='http', auth='public', methods=['GET', 'POST'], csrf=False, cors="*")
+    def idle_count(self, **kw):
+        """
+        Query idle counts and alarm statistics for the requested machine tools.
+        """
+        res = {'status': 1, 'message': '成功', 'data': {}}
+        logging.info('Machine-tool data requested by the front end with parameters: %s' % kw)
+
+        # Connect to the time-series database
+        conn = psycopg2.connect(**db_config)
+        cur = conn.cursor()
+        try:
+            # Parse the list of requested machine tools
+            machine_list = ast.literal_eval(kw['machine_list'])
+            idle_times = []
+            idle_dict = {}
+
+            for item in machine_list:
+                sql = '''
+                SELECT idle_start_time,alarm_time,alarm_repair_time FROM device_data WHERE device_name = %s;
+                '''
+                # Run the query for this device
+                cur.execute(sql, (item,))
+                result = cur.fetchall()
+                # # print('result', result)
+                #
+                # # Collect the query results into idle_times
+                # idle_times = [row[0] for row in result if row[0] is not None]
+                #
+                # # Deduplicate the results
+                # unique_idle_times = set(idle_times)
+                # # print('unique_idle_times', unique_idle_times)
+                #
+                # # Count the deduplicated values
+                # idle_count = len(unique_idle_times)
+                # # idle_dict[item] = idle_count
+                #
+                # res['data'][item] = idle_count
+
+                total_alarm_time = 0
+                alarm_count = 0
+                alarm_time_list = []
+                idle_times = []
+                alarm_times = []
+
+                for row in result:
+                    idle_start_time = row[0]
+                    alarm_time = row[1]
+                    alarm_repair_time = row[2]
+
+                    alarm_time_list.append(alarm_time)  # collect alarm durations (seconds) for later deduplication
+                    idle_times.append(idle_start_time)
+                    # if alarm_repair_time is not None:
+                    #     alarm_times.append(alarm_repair_time)
+                    alarm_times.append(alarm_repair_time)
+
+                # Deduplicate the collected values
+                unique_total_alarm_time = set(alarm_time_list)
+                unique_idle_times = set(idle_times)
+                unique_alarm_times = set(alarm_times)
+
+                # Count the distinct idle start times
+                idle_count = len(unique_idle_times)
+
+                for alarm_time in unique_total_alarm_time:
+                    if alarm_time is not None:
+                        total_alarm_time += abs(float(alarm_time))
+
+                alarm_count = len(unique_alarm_times) if unique_alarm_times else 0
+                alarm_count = alarm_count if total_alarm_time else 0
+
+                # Store the idle count, total alarm duration and alarm count per device
+                res['data'][item] = {
+                    'idle_count': idle_count,
+                    'total_alarm_time': total_alarm_time / 3600,  # seconds converted to hours
+                    'alarm_count': alarm_count
+                }
+
+            # Return the aggregated statistics
+            return json.dumps(res)
+        except Exception as e:
+            logging.error('IdleAlarmCount query failed: %s', e)
+            return json.dumps(res)
+        finally:
+            cur.close()
+            conn.close()
diff --git a/sf_manufacturing/models/mrp_workorder.py b/sf_manufacturing/models/mrp_workorder.py
index 6f6a4be9..a69160cb 100644
--- a/sf_manufacturing/models/mrp_workorder.py
+++ b/sf_manufacturing/models/mrp_workorder.py
@@ -1167,6 +1167,9 @@ class ResMrpWorkOrder(models.Model):
                 record.process_state = '待解除装夹'
                 # record.write({'process_state': '待加工'})
                 record.production_id.process_state = '待解除装夹'
+                self.env['sf.production.plan'].sudo().search([('name', '=', record.production_id.name)]).write({
+                    'state': 'finished'
+                })
                 record.production_id.write({'detection_result_ids': [(0, 0, {
                     'rework_reason': record.reason,
                     'detailed_reason': record.detailed_reason,
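
A minimal sketch, not part of the applied diff: assuming the same device_data table, its idle_start_time / alarm_time / alarm_repair_time columns, and the db_config dict introduced above, the per-device deduplication and counting that idle_count performs in Python could be pushed into a single SQL statement. The helper name fetch_idle_alarm_stats is hypothetical, alarm_time is assumed to cast cleanly to double precision (as the handler's float() call implies), and the handler's extra rule that zeroes alarm_count when the total alarm time is zero is omitted here.

import psycopg2


def fetch_idle_alarm_stats(db_config, device_name):
    """Return (idle_count, total_alarm_hours, alarm_count) for one device."""
    sql = """
        SELECT
            COUNT(DISTINCT idle_start_time)                                        AS idle_count,
            COALESCE(SUM(DISTINCT ABS(alarm_time::double precision)), 0) / 3600.0  AS total_alarm_hours,
            COUNT(DISTINCT alarm_repair_time)                                      AS alarm_count
        FROM device_data
        WHERE device_name = %s;
    """
    # One round trip per device; DISTINCT mirrors the set()-based deduplication in the handler.
    with psycopg2.connect(**db_config) as conn, conn.cursor() as cur:
        cur.execute(sql, (device_name,))
        return cur.fetchone()

Letting PostgreSQL deduplicate and aggregate returns only three scalars per device instead of every raw row, which keeps the controller code short and the response time largely independent of table size.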