Commit a0843550, authored by 贺阳

Merge branch 'develop' into feature/PDA扫码揽收

@@ -41,6 +41,7 @@
        'views/cc_history_package_good_view.xml',
        'views/cc_history_ship_package_view.xml',
        'views/cc_history_package_sync_log_view.xml',
        'views/history_tt_api_log.xml',
        'views/menu_view.xml',
        # 'views/cc_customers_declaration_order_view.xml',
        'templates/login.xml',
...
@@ -14,4 +14,5 @@ from . import res_config_setting
from . import cc_history_big_package
from . import cc_history_package_good
from . import cc_history_ship_package
from . import cc_history_package_sync_log
from . import history_tt_api_log
@@ -639,11 +639,13 @@ class CcBL(models.Model):
        history_days = self.env['ir.config_parameter'].sudo().get_param('history_days') or 180
        history_limit = self.env['ir.config_parameter'].sudo().get_param('history_limit') or 50
        origin_delete = self.env['ir.config_parameter'].sudo().get_param('origin_delete') or 1
        history_limit_log = self.env['ir.config_parameter'].sudo().get_param('history_limit_log') or 5000
        redis_conn = self.env['common.common'].sudo().get_redis()
        vals = {
            'history_days': history_days,
            'history_limit': history_limit,
            'origin_delete': origin_delete,
            'history_limit_log': history_limit_log
        }
        redis_conn.lpush('history_data_list', json.dumps(vals))
...
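For orientation, here is a minimal sketch of the consuming side of this handoff, assuming the external archive worker pops the same Redis list that `lpush` fills above. The worker loop and connection details are hypothetical; only the list name and payload keys come from this commit.

import json
import redis

# Hypothetical worker loop for the external archive script.
conn = redis.Redis(host='localhost', port=6379, db=0)  # connection details assumed
while True:
    # blocks until the cc_bl side pushes a config payload onto 'history_data_list'
    _key, raw = conn.brpop('history_data_list')
    params = json.loads(raw)
    print(params.get('history_days'), params.get('history_limit_log'))
    # the raw JSON string can then be handed to Order_dispose().order_data(raw) or similar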
# -*- coding: utf-8 -*-
# Part of SmartGo. See LICENSE file for full copyright and licensing details.
from odoo import models, fields
import logging

_logger = logging.getLogger(__name__)


class HistoryTTErrorLog(models.Model):
_name = "history.tt.api.log"
_description = "tt推送日志"
_order = 'id desc'
big_bag_no = fields.Char('业务信息', index=True)
push_time = fields.Datetime('产生时间', index=True)
error_msg = fields.Char('失败原因')
success_bl = fields.Boolean('是否成功', default=False, index=True)
data_text = fields.Text('传输数据')
request_id = fields.Char('请求id', index=True)
source = fields.Selection([('推入', '推入'), ('推出', '推出')], string='类型', default='推入')
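Worth noting: `_name = "history.tt.api.log"` maps to the Postgres table `history_tt_api_log`, which is exactly the table the external script fills with pandas' `to_sql` in `history_ao_tt_log` further down. A minimal sketch of that contract, assuming a SQLAlchemy engine pointed at the Odoo database (the DSN is hypothetical):

import pandas as pd
from sqlalchemy import create_engine

engine = create_engine('postgresql://user:pass@localhost/odoo_db')  # DSN assumed
# column names must match the model's field names; unset columns stay NULL
row = {'big_bag_no': 'BB001', 'success_bl': True, 'source': '推入'}
pd.DataFrame([row]).to_sql('history_tt_api_log', con=engine, if_exists='append', index=False)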
@@ -73,4 +73,5 @@ order_state_change_rule_group_user,order_state_change_rule_group_user,ccs_base.m
access_cc_history_big_package_base.group_user,cc_history_big_package base.group_user,ccs_base.model_cc_history_big_package,base.group_user,1,1,1,1
access_cc_history_package_good_base.group_user,cc_history_package_good base.group_user,ccs_base.model_cc_history_package_good,base.group_user,1,1,1,1
access_cc_history_ship_package_base.group_user,cc_history_ship_package base.group_user,ccs_base.model_cc_history_ship_package,base.group_user,1,1,1,1
access_cc_history_package_sync_log_base.group_user,cc_history_package_sync_log base.group_user,ccs_base.model_cc_history_package_sync_log,base.group_user,1,1,1,1
access_history_tt_api_log_base.group_user,history_tt_api_log base.group_user,ccs_base.model_history_tt_api_log,base.group_user,1,1,1,1
@@ -7,6 +7,7 @@
            <field name="arch" type="xml">
                <tree string="Ship Package" decoration-warning="is_cancel==True">
                    <field optional="show" name="state" string="Progress" widget="badge" decoration-info="1 == 1"/>
                    <field name="is_sync" string="是否同步"/>
                    <field optional="show" name="process_time"/>
                    <field optional="show" name="tracking_no" string="Tracking No."/>
                    <field optional="show" name="trade_no"/>
...
<?xml version="1.0" encoding="utf-8"?>
<odoo>
<data>
        <!-- TikTok push log (TIKTOK推送日志) views -->
<record model="ir.ui.view" id="tree_history_tt_api_log_view">
<field name="name">tree.history.tt.api.log</field>
<field name="model">history.tt.api.log</field>
<field name="arch" type="xml">
<tree string="TIKTOK推送日志">
<field name="source"/>
<field name="big_bag_no"/>
<field name="push_time"/>
<field name="error_msg"/>
<field name="success_bl"/>
<field name="request_id"/>
<field name="create_date" optional="hide"/>
</tree>
</field>
</record>
<record model="ir.ui.view" id="form_history_tt_api_log_view">
<field name="name">form.history.tt.api.log</field>
<field name="model">history.tt.api.log</field>
<field name="arch" type="xml">
<form string="TIKTOK推送日志">
<sheet>
<group>
<field name="source"/>
<field name="big_bag_no"/>
<field name="push_time"/>
<field name="error_msg"/>
<field name="success_bl"/>
<field name="request_id"/>
<field name="data_text"/>
</group>
</sheet>
</form>
</field>
</record>
<record model="ir.ui.view" id="search_history_tt_api_log_view">
<field name="name">search.history.tt.api.log</field>
<field name="model">history.tt.api.log</field>
<field name="arch" type="xml">
<search string="TIKTOK推送日志">
<filter string="今日日志"
domain="[('create_date', '&gt;=', (datetime.datetime.combine(context_today(), datetime.time(0, 0, 0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S')), ('create_date', '&lt;', (datetime.datetime.combine(context_today(), datetime.time(0, 0, 0)).to_utc() + datetime.timedelta(days=1)).strftime('%Y-%m-%d %H:%M:%S'))]"
name="create_date"/>
<filter string="昨日日志"
domain="[('create_date', '>=', (datetime.datetime.combine(context_today() - datetime.timedelta(days=1), datetime.time(0, 0, 0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S')), ('create_date', '&lt;', (datetime.datetime.combine(context_today(), datetime.time(0, 0, 0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S'))]"
name="last_24h"/>
<filter string="近7日日志"
domain="[('create_date', '>=', (datetime.datetime.combine(context_today() - datetime.timedelta(days=7), datetime.time(0, 0, 0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S')),('create_date', '&lt;', (datetime.datetime.combine(context_today(), datetime.time(0, 0, 0)).to_utc() + datetime.timedelta(days=1)).strftime('%Y-%m-%d %H:%M:%S'))]"
name="last_7d"/>
<filter string="本周日志" name="this_week_log"
domain="[('create_date', '&gt;=', (datetime.datetime.combine(context_today() + relativedelta(weeks=-1,days=1,weekday=0), datetime.time(0,0,0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S')),
('create_date', '&lt;', (datetime.datetime.combine(context_today() + relativedelta(days=1,weekday=0), datetime.time(0,0,0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S'))]"/>
<filter string="上周日志" name="last_week_log"
domain="[('create_date', '&gt;=', (datetime.datetime.combine(context_today() + relativedelta(weeks=-2,days=1,weekday=0), datetime.time(0,0,0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S')),
('create_date', '&lt;', (datetime.datetime.combine(context_today() + relativedelta(weeks=-1,days=1,weekday=0), datetime.time(0,0,0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S'))]"/>
<!-- <filter string="本月日志" name="this_month_log"-->
<!-- domain="[('create_date','&gt;=', time.strftime('%Y-%m-1 00:00:00')),('create_date','&lt;',(context_today() + relativedelta(months=1)).strftime('%Y-%m-1 00:00:00'))]"/>-->
<!-- <filter string="上月日志" name="last_month_log"-->
<!-- domain="[('create_date','&lt;', time.strftime('%Y-%m-1 00:00:00')),('create_date','&gt;=',(context_today() - relativedelta(months=1)).strftime('%Y-%m-1 00:00:00'))]"/>-->
<filter string="近30日日志" name="last_30d"
domain="[('create_date', '>=', (datetime.datetime.combine(context_today() - datetime.timedelta(days=30), datetime.time(0, 0, 0)).to_utc()).strftime('%Y-%m-%d %H:%M:%S')), ('create_date', '&lt;', (datetime.datetime.combine(context_today(), datetime.time(0, 0, 0)).to_utc() + datetime.timedelta(days=1)).strftime('%Y-%m-%d %H:%M:%S'))]"/>
<separator/>
<field name="big_bag_no"/>
<field name="data_text"/>
<separator/>
<filter name="filter_success_bl" string="成功" domain="[('success_bl','=',True)]"/>
<filter name="filter_not_success_bl" string="失败" domain="[('success_bl','=',False)]"/>
<separator/>
<group expand="0" string="分组">
<filter domain="[]" name="groupby_success_bl" string="是否成功"
context="{'group_by': 'success_bl'}"/>
<filter domain="[]" name="groupby_big_bag_no" string="业务信息"
context="{'group_by': 'big_bag_no'}"/>
<filter domain="[]" name="groupby_source" string="类型"
context="{'group_by': 'source'}"/>
</group>
<searchpanel>
<field icon="fa-users" select="multi" name="source"/>
</searchpanel>
</search>
</field>
</record>
<record model="ir.actions.act_window" id="action_history_tt_api_log">
<field name="name">TIKTOK推送日志</field>
<field name="res_model">history.tt.api.log</field>
<field name="view_mode">tree,form,search</field>
<field name="domain">[]</field>
<field name="context">{'search_default_last_30d':1}</field>
</record>
</data>
</odoo>
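The filter domains above are evaluated with `context_today()`, `datetime` and `relativedelta` in scope; `.to_utc()` is presumably a timezone-conversion helper available in this codebase's eval context. As a rough Python illustration of the half-open window the `last_30d` filter builds (ignoring the timezone conversion):

from datetime import datetime, time, timedelta

today = datetime.now().date()  # stands in for context_today()
start = datetime.combine(today - timedelta(days=30), time(0, 0, 0))
end = datetime.combine(today, time(0, 0, 0)) + timedelta(days=1)
# a record matches when: start <= create_date < end (after converting to UTC)
print(start.strftime('%Y-%m-%d %H:%M:%S'), end.strftime('%Y-%m-%d %H:%M:%S'))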
@@ -61,5 +61,7 @@
        <menuitem parent="menu_cc_history_data" sequence="3" name="History Ship Package" id="menu_cc_history_ship_package" action="action_cc_history_ship_package"/>
        <menuitem parent="menu_cc_history_data" id="menu_history_flight_tt_api_log" name="TIKTOK推送日志" sequence="7" action="action_history_tt_api_log"/>
    </data>
</odoo>
@@ -11,13 +11,10 @@ import pandas as pd
# from line_profiler import LineProfiler
# create a LineProfiler instance
# profiler = LineProfiler()
# write the log file as utf-8 (the old filename-only basicConfig call used the platform default encoding, gbk)
logging.basicConfig(handlers=[logging.FileHandler('logs/history_data_logger.log', 'a', 'utf-8')],
                    format='%(asctime)s %(levelname)s %(message)s', level=logging.INFO)


class Order_dispose(object):
@@ -41,7 +38,7 @@ class Order_dispose(object):
        pg_conn = psycopg2.connect(**config.postgresql_options)
        cursor = pg_conn.cursor()
        db_handle = self.conn_engine
        current_date = datetime.utcnow()  # was datetime.today(); create_date comparisons are done in UTC
        # compute the cutoff date, `days` days back (default 180)
        past_date = current_date - timedelta(days=days)
        return pg_conn, cursor, db_handle, past_date
@@ -278,7 +275,31 @@ class Order_dispose(object):
    def delete_origin_data(self):
        pass
def history_ao_tt_log(self, db_handle, past_date, limit_log_num):
sql = 'select id,big_bag_no,push_time,error_msg,success_bl,data_text,request_id,source from ao_tt_api_log where create_date < %s order by create_date asc limit %s;'
log_result_arr = pd.read_sql(sql, con=db_handle, params=(past_date, int(limit_log_num)))
tk_log_vals_list = []
for tk_log_result in log_result_arr.itertuples():
tk_log_data = tk_log_result
            # build the history TikTok log row
tk_log_vals = {
'big_bag_no': tk_log_data.big_bag_no,
'push_time': tk_log_data.push_time,
'error_msg': tk_log_data.error_msg,
'success_bl': tk_log_data.success_bl,
'data_text': tk_log_data.data_text,
'request_id': tk_log_data.request_id,
'source': tk_log_data.source,
}
tk_log_vals_list.append(tk_log_vals)
if tk_log_vals_list:
val_df = pd.DataFrame(tk_log_vals_list)
val_df.to_sql('history_tt_api_log', con=db_handle, if_exists='append', index=False)
origin_tk_log_ids = log_result_arr['id'].tolist()
return origin_tk_log_ids
    # @profiler
    # The previous per-record implementation of order_data is kept below for
    # reference, disabled by wrapping it in a string literal.
    """
    def order_data(self, data):
        res_data = []
        try:
@@ -286,12 +307,14 @@
            days = int(data.get('history_days', 180))  # default: archive data older than 180 days
            origin_delete = int(data.get('origin_delete', 1))  # default: delete the source rows
            limit_num = int(data.get('history_limit', 50))  # default: sync 50 BLs per run
            limit_log_num = int(data.get('history_limit_log', 5000))  # default: sync 5000 log rows per run
            pg_conn, cursor, db_handle, past_date = self.get_init_data(days)
            logging.info(f'Sync started, cutoff: {past_date}')
            # fetch the BLs to archive
            sql = 'select id,bl_no from cc_bl where (is_history=False or is_history is null) and create_date < %s order by create_date asc limit %s;'
            result_arr = pd.read_sql(sql, con=db_handle, params=(past_date, int(limit_num)))
            sql = 'select id,bl_no from cc_bl where id = 14;'
            result_arr = pd.read_sql(sql, con=db_handle)  # , params=(14,)
            # iterate over the query results
            # delete_order_ids = []
            for res in result_arr.itertuples():
@@ -326,6 +349,22 @@
                        origin_goods_ids = self.history_package_goods(db_handle, ship_package_data.id, history_ship_package_id, history_big_id,
                                                                      select_bl_id)
                        delete_good_ids += origin_goods_ids
                # handle parcels under this BL that are not linked to any big package
                no_big_parcel_sql = "select * from cc_ship_package where bl_id=%s and big_package_id is null;" % select_bl_id
                no_big_package_result_arr = pd.read_sql(no_big_parcel_sql, con=db_handle)
                # source parcel rows
                for no_big_package_data in no_big_package_result_arr.itertuples():
                    # archive the parcel
                    history_no_big_package_id = self.history_ship_package(db_handle, None, no_big_package_data)
                    delete_package_ids.append(no_big_package_data.id)  # queue the parcel for deletion
                    # archive the parcel push logs
                    origin_log_ids = self.history_sync_log(db_handle, history_no_big_package_id, no_big_package_data.id)
                    delete_log_ids += origin_log_ids
                    # archive the parcel goods
                    origin_goods_ids = self.history_package_goods(db_handle, no_big_package_data.id,
                                                                  history_no_big_package_id, None,
                                                                  select_bl_id)
                    delete_good_ids += origin_goods_ids
                update_sql = 'update cc_bl set is_history=True where id=%s' % select_bl_id
                pd.read_sql(update_sql, con=db_handle, chunksize=100)
                if origin_delete:
@@ -347,11 +386,362 @@
                        delete_log_sql = 'delete from cc_ship_package_sync_log where id in %s' % ids
                        pd.read_sql(delete_log_sql, con=db_handle, chunksize=100)
                logging.info(f'Synced BL {select_bl_no}: deleted {len(delete_big_ids)} big packages, {len(delete_package_ids)} parcels, {len(delete_good_ids)} parcel goods, {len(delete_log_ids)} parcel sync logs')
            # TikTok push logs
            origin_tk_log_ids = self.history_ao_tt_log(db_handle, past_date, limit_log_num)
            if origin_delete and origin_tk_log_ids:
                logging.info(f'Log sync finished, deleted {len(origin_tk_log_ids)} TikTok log rows')
            logging.info('Sync finished')
        except Exception as ex:
            logging.error('create_history_data error:%s' % str(ex))
        # profiler.print_stats(output_unit=1)
        return res_data
"""
def order_data(self, data):
res_data = []
try:
data = json.loads(data)
            days = int(data.get('history_days', 180))  # default: archive data older than 180 days
            origin_delete = int(data.get('origin_delete', 1))  # default: delete the source rows
            limit_num = int(data.get('history_limit', 50))  # default: sync 50 BLs per run
            limit_log_num = int(data.get('history_limit_log', 5000))  # default: sync 5000 log rows per run
pg_conn, cursor, db_handle, past_date = self.get_init_data(days)
            logging.info(f'Sync started, cutoff: {past_date}')
            # fetch the BLs to archive
sql = 'select id,bl_no from cc_bl where (is_history=False or is_history is null) and create_date < %s order by create_date asc limit %s;'
result_arr = pd.read_sql(sql, con=db_handle, params=(past_date, int(limit_num)))
# sql = 'select id,bl_no from cc_bl where id = 14;'
# result_arr = pd.read_sql(sql, con=db_handle) # , params=(14,)
            # iterate over the query results
            # delete_order_ids = []
for res in result_arr.itertuples():
                select_bl_id = res.id  # source BL id
select_bl_no = res.bl_no
                # fetch all big packages under this BL
big_sql = 'select id,bl_id,big_package_no,next_provider_name,ship_package_qty,goods_qty,' \
'pallet_number,pallet_usage_date,is_cancel,tally_state,tally_user_id,tally_time,' \
'delivery_user_id,delivery_time from cc_big_package where bl_id = %s;'
big_package_result_arr = pd.read_sql(big_sql, con=db_handle, params=(select_bl_id,))
                # archive the big packages
origin_big_ids, result_dict = self.batch_history_big_package(db_handle, big_package_result_arr)
                # archive the parcels (all parcels under the BL, with or without a big package)
no_big_parcel_sql = "select * from cc_ship_package where bl_id=%s;" % select_bl_id
no_big_package_result_arr = pd.read_sql(no_big_parcel_sql, con=db_handle)
origin_ship_ids, package_dict = self.batch_history_ship_package(db_handle, result_dict, no_big_package_result_arr)
                # archive the parcel sync logs
origin_log_ids = self.batch_history_sync_log(db_handle, package_dict, origin_ship_ids)
                # archive the parcel goods
origin_goods_ids = self.batch_history_package_goods(db_handle, package_dict, origin_ship_ids, result_dict)
update_sql = 'update cc_bl set is_history=True where id=%s' % select_bl_id
pd.read_sql(update_sql, con=db_handle, chunksize=100)
if origin_delete:
# print('delete data')
                    if origin_big_ids:  # delete the source big packages
ids = '(%s)' % str(origin_big_ids)[1:-1]
delete_big_sql = 'delete from cc_big_package where id in %s' % ids
pd.read_sql(delete_big_sql, con=db_handle, chunksize=100)
                    if origin_ship_ids:  # delete the source parcels
ids = '(%s)' % str(origin_ship_ids)[1:-1]
delete_package_sql = 'delete from cc_ship_package where id in %s' % ids
pd.read_sql(delete_package_sql, con=db_handle, chunksize=100)
                    if origin_goods_ids:  # delete the source parcel goods
ids = '(%s)' % str(origin_goods_ids)[1:-1]
delete_goods_sql = 'delete from cc_package_good where id in %s' % ids
pd.read_sql(delete_goods_sql, con=db_handle, chunksize=100)
                    if origin_log_ids:  # delete the source parcel sync logs
ids = '(%s)' % str(origin_log_ids)[1:-1]
delete_log_sql = 'delete from cc_ship_package_sync_log where id in %s' % ids
pd.read_sql(delete_log_sql, con=db_handle, chunksize=100)
                logging.info(f'Synced BL {select_bl_no}: deleted {len(origin_big_ids)} big packages, {len(origin_ship_ids)} parcels, {len(origin_goods_ids)} parcel goods, {len(origin_log_ids)} parcel sync logs')
            # TikTok push logs
            origin_tk_log_ids = self.history_ao_tt_log(db_handle, past_date, limit_log_num)
            if origin_delete and origin_tk_log_ids:
                ids = '(%s)' % str(origin_tk_log_ids)[1:-1]
                delete_tk_log_sql = 'delete from ao_tt_api_log where id in %s' % ids
                pd.read_sql(delete_tk_log_sql, con=db_handle, chunksize=100)
                logging.info(f'Log sync finished, deleted {len(origin_tk_log_ids)} TikTok log rows')
            logging.info('Sync finished')
except Exception as ex:
logging.error('create_history_data error:%s' % str(ex))
# profiler.print_stats(output_unit=1)
return res_data
def batch_history_big_package(self, db_handle, big_package_result_arr):
"""
创建历史大包数据
:param db_handle:
:param big_package_result_arr: 原大包数据
:return: 历史大包id
"""
        # build the history big-package rows
big_package_arr = []
origin_big_ids = []
for big_package_data in big_package_result_arr.itertuples():
origin_id = big_package_data.id
big_package_vals = {
'origin_id': big_package_data.id,
'bl_id': big_package_data.bl_id,
'big_package_no': big_package_data.big_package_no,
'next_provider_name': big_package_data.next_provider_name,
'ship_package_qty': big_package_data.ship_package_qty,
'goods_qty': big_package_data.goods_qty,
'pallet_number': big_package_data.pallet_number,
'pallet_usage_date': big_package_data.pallet_usage_date,
'is_cancel': big_package_data.is_cancel,
'tally_state': big_package_data.tally_state,
'tally_user_id': big_package_data.tally_user_id,
'tally_time': big_package_data.tally_time,
'delivery_user_id': big_package_data.delivery_user_id,
'delivery_time': big_package_data.delivery_time,
                # exception_info_ids, goods_ids and ship_package_ids are
                # relational fields, rebuilt separately
}
big_package_arr.append(big_package_vals)
origin_big_ids.append(origin_id)
        if not big_package_arr:
            # nothing to archive; avoid building an invalid "in ()" clause below
            return origin_big_ids, {}
        val_df = pd.DataFrame(big_package_arr)
        val_df.to_sql('cc_history_big_package', con=db_handle, if_exists='append', index=False)
        # map each source id to the freshly inserted history id
big_ids_str = '(%s)' % str(origin_big_ids)[1:-1]
sql = 'select id,origin_id from cc_history_big_package where origin_id in %s' % big_ids_str
new_order_arr = pd.read_sql(sql, con=db_handle)
result_dict = {}
for new_order_data in new_order_arr.itertuples():
result_dict[new_order_data.origin_id] = new_order_data.id
# logging.info("new_order:%s" % len(new_order))
if result_dict:
            # carry over the big packages' exception links
sql = 'select cc_big_package_id,cc_exception_info_id from bigpackage_exception_info_rel where cc_big_package_id in %s' % big_ids_str
exception_order_arr = pd.read_sql(sql, con=db_handle)
exception_vals_arr = []
for exception_order_data in exception_order_arr.itertuples():
exception_vals = {
'cc_history_big_package_id': result_dict.get(exception_order_data.cc_big_package_id, None),
'cc_exception_info_id': exception_order_data.cc_exception_info_id
}
exception_vals_arr.append(exception_vals)
if exception_vals_arr:
val_df = pd.DataFrame(exception_vals_arr)
val_df.to_sql('history_bigpackage_exception_info_rel', con=db_handle, if_exists='append', index=False)
return origin_big_ids, result_dict
def batch_history_ship_package(self, db_handle, big_dict, no_big_package_result_arr):
"""
创建历史小包数据
:param db_handle:
:param no_big_package_result_arr: 原小包数据
:param big_dict: 历史大包数据
:return: 历史小包id
"""
history_ship_package_vals_arr = []
origin_ship_ids = []
for ship_package_data in no_big_package_result_arr.itertuples():
            # build the history parcel row
history_ship_package_vals = {
'customer_id': ship_package_data.customer_id,
'origin_id': ship_package_data.id,
'bl_id': ship_package_data.bl_id,
                'big_package_id': big_dict.get(ship_package_data.big_package_id),
'logistic_order_no': ship_package_data.logistic_order_no,
'tracking_no': ship_package_data.tracking_no,
'customer_ref': ship_package_data.customer_ref,
'internal_account_number': ship_package_data.internal_account_number,
'user_track_note': ship_package_data.user_track_note,
'company_code': ship_package_data.company_code,
'trade_no': ship_package_data.trade_no,
'big_package_no': ship_package_data.big_package_no,
'container_no': ship_package_data.container_no,
'buyer_region': ship_package_data.buyer_region,
'next_provider_name': ship_package_data.next_provider_name,
'sender_name': ship_package_data.sender_name,
'sender_vat_no': ship_package_data.sender_vat_no,
'sender_phone': ship_package_data.sender_phone,
'sender_add_1': ship_package_data.sender_add_1,
'sender_add_2': ship_package_data.sender_add_2,
'sender_add_3': ship_package_data.sender_add_3,
'sender_city': ship_package_data.sender_city,
'sender_state': ship_package_data.sender_state,
'sender_postcode': ship_package_data.sender_postcode,
'sender_country': ship_package_data.sender_country,
'receiver_name': ship_package_data.receiver_name,
'receiver_type': ship_package_data.receiver_type,
'receiver_vat_no': ship_package_data.receiver_vat_no,
'receiver_add_1': ship_package_data.receiver_add_1,
'receiver_add_2': ship_package_data.receiver_add_2,
'receiver_add_3': ship_package_data.receiver_add_3,
'receiver_city': ship_package_data.receiver_city,
'receiver_county': ship_package_data.receiver_county,
'receiver_county_translate': ship_package_data.receiver_county_translate,
'receiver_postcode': ship_package_data.receiver_postcode,
'receiver_email': ship_package_data.receiver_email,
'receiver_phone': ship_package_data.receiver_phone,
'gross_weight': ship_package_data.gross_weight,
'weight_unit': ship_package_data.weight_unit,
'currency': ship_package_data.currency,
'currency_id': ship_package_data.currency_id,
'total_value': ship_package_data.total_value,
'shipping_fee': ship_package_data.shipping_fee,
'tax_mark': ship_package_data.tax_mark,
'actual_tax': ship_package_data.actual_tax,
'actual_vat': ship_package_data.actual_vat,
'actual_gst': ship_package_data.actual_gst,
'actual_tax_currency': ship_package_data.actual_tax_currency,
'actual_currency_id': ship_package_data.actual_currency_id,
'actual_tax_date': ship_package_data.actual_tax_date,
'actual_tax_tz': ship_package_data.actual_tax_tz,
'is_cancel': ship_package_data.is_cancel,
'state': ship_package_data.state,
'node_exception_reason_id': ship_package_data.node_exception_reason_id,
'process_time': ship_package_data.process_time,
'cancel_reason': ship_package_data.cancel_reason,
                # exception_info_ids and invoice_attachment_ids are relational
                # fields, rebuilt separately below; good_ids and sync_log_ids
                # are handled by their own batch methods
'operation_time': ship_package_data.operation_time,
'state_explain': ship_package_data.state_explain,
'is_sync': ship_package_data.is_sync,
'tk_code': ship_package_data.tk_code,
}
origin_ship_ids.append(ship_package_data.id)
history_ship_package_vals_arr.append(history_ship_package_vals)
        if not history_ship_package_vals_arr:
            # nothing to archive; avoid building an invalid "in ()" clause below
            return origin_ship_ids, {}
        val_df = pd.DataFrame(history_ship_package_vals_arr)
        val_df.to_sql('cc_history_ship_package', con=db_handle, if_exists='append', index=False)
        ship_ids_str = '(%s)' % str(origin_ship_ids)[1:-1]
sql = 'select id,origin_id from cc_history_ship_package where origin_id in %s' % ship_ids_str
new_order_arr = pd.read_sql(sql, con=db_handle)
result_dict = {}
for new_order_data in new_order_arr.itertuples():
result_dict[new_order_data.origin_id] = new_order_data.id
        # carry over the parcels' exception links
if result_dict:
sql = 'select cc_ship_package_id,cc_exception_info_id from shippackage_exception_info_rel where cc_ship_package_id in %s' % ship_ids_str
exception_order_arr = pd.read_sql(sql, con=db_handle)
exception_vals_arr = []
for exception_order_data in exception_order_arr.itertuples():
exception_vals = {
'cc_history_ship_package_id': result_dict.get(exception_order_data.cc_ship_package_id, None),
'cc_exception_info_id': exception_order_data.cc_exception_info_id
}
exception_vals_arr.append(exception_vals)
if exception_vals_arr:
val_df = pd.DataFrame(exception_vals_arr)
val_df.to_sql('history_package_exception_info_rel', con=db_handle, if_exists='append', index=False)
            # carry over the parcels' invoice attachments
sql = 'select cc_ship_package_id,ir_attachment_id from ship_package_invoice_attachment_rel where cc_ship_package_id in %s' % ship_ids_str
attachment_order_arr = pd.read_sql(sql, con=db_handle)
attachment_vals_arr = []
for attachment_order_data in attachment_order_arr.itertuples():
attachment_vals = {
'cc_history_ship_package_id': result_dict.get(attachment_order_data.cc_ship_package_id, None),
'ir_attachment_id': attachment_order_data.ir_attachment_id
}
attachment_vals_arr.append(attachment_vals)
if attachment_vals_arr:
val_df = pd.DataFrame(attachment_vals_arr)
val_df.to_sql('history_package_invoice_attachment_rel', con=db_handle, if_exists='append', index=False)
return origin_ship_ids, result_dict
def batch_history_sync_log(self, db_handle, package_dict, origin_ship_ids):
"""
创建历史小包日志
:param db_handle:
:param origin_ship_ids: 小包id
:param package_dict: 原来小包数据
:return:
"""
        if not origin_ship_ids:
            # nothing to look up; avoid an invalid "in ()" clause
            return []
        ids = '(%s)' % str(origin_ship_ids)[1:-1]
sync_sql = "SELECT id, package_id, sync_time, api_customer, process_code, operate_time, operate_remark, operate_user FROM cc_ship_package_sync_log WHERE package_id in %s;" % ids
sync_log_result_arr = pd.read_sql(sync_sql, con=db_handle)
sync_log_vals_list = []
for sync_log_result in sync_log_result_arr.itertuples():
sync_log_data = sync_log_result
            # build the history sync-log row
sync_log_vals = {
'package_id': package_dict.get(sync_log_data.package_id, None),
'sync_time': sync_log_data.sync_time,
'api_customer': sync_log_data.api_customer,
'process_code': sync_log_data.process_code,
'operate_time': sync_log_data.operate_time,
'operate_remark': sync_log_data.operate_remark,
'operate_user': sync_log_data.operate_user,
}
sync_log_vals_list.append(sync_log_vals)
if sync_log_vals_list:
val_df = pd.DataFrame(sync_log_vals_list)
val_df.to_sql('cc_history_package_sync_log', con=db_handle, if_exists='append', index=False)
origin_log_ids = sync_log_result_arr['id'].tolist()
return origin_log_ids
def batch_history_package_goods(self, db_handle, package_dict, origin_ship_ids, big_dict):
"""
创建历史小包商品
:param db_handle:
:param origin_ship_ids: 原小包id
:param package_dict: 历史小包id
:param big_dict: 历史大包id
:return:
"""
        # goods
        if not origin_ship_ids:
            # nothing to look up; avoid an invalid "in ()" clause
            return []
        ids = '(%s)' % str(origin_ship_ids)[1:-1]
package_good_sql = """select id, bl_line_id, big_package_id, bl_id, item_id, sku_id,
item_name_cn, item_name_en, export_hs_code, import_hs_code, weight,
quantity, quantity_unit, declare_price, freight, cod_amount, vat_rate,
item_vat, origin_country, item_type, item_total_price, item_link,
item_tax_status, actual_tax, actual_tax_rate, actual_tax_currency,
actual_vat, actual_vat_rate, actual_gst, actual_gst_rate, currency_id,
is_cancel from cc_package_good where bl_line_id in %s;""" % ids
package_good_result_arr = pd.read_sql(package_good_sql, con=db_handle)
# origin_goods_ids = []
package_good_vals_list = []
for package_good_data in package_good_result_arr.itertuples():
            # build the history goods row
package_good_vals = {
'origin_id': package_good_data.id,
'bl_line_id': package_dict.get(package_good_data.bl_line_id, None),
'big_package_id': big_dict.get(package_good_data.big_package_id, None),
'bl_id': package_good_data.bl_id,
'item_id': package_good_data.item_id,
'sku_id': package_good_data.sku_id,
'item_name_cn': package_good_data.item_name_cn,
'item_name_en': package_good_data.item_name_en,
'export_hs_code': package_good_data.export_hs_code,
'import_hs_code': package_good_data.import_hs_code,
'weight': package_good_data.weight,
'quantity': package_good_data.quantity,
'quantity_unit': package_good_data.quantity_unit,
'declare_price': package_good_data.declare_price,
'freight': package_good_data.freight,
'cod_amount': package_good_data.cod_amount,
'vat_rate': package_good_data.vat_rate,
'item_vat': package_good_data.item_vat,
'origin_country': package_good_data.origin_country,
'item_type': package_good_data.item_type,
'item_total_price': package_good_data.item_total_price,
'item_link': package_good_data.item_link,
'item_tax_status': package_good_data.item_tax_status,
'actual_tax': package_good_data.actual_tax,
'actual_tax_rate': package_good_data.actual_tax_rate,
'actual_tax_currency': package_good_data.actual_tax_currency,
'actual_vat': package_good_data.actual_vat,
'actual_vat_rate': package_good_data.actual_vat_rate,
'actual_gst': package_good_data.actual_gst,
'actual_gst_rate': package_good_data.actual_gst_rate,
'currency_id': package_good_data.currency_id,
'is_cancel': package_good_data.is_cancel,
}
package_good_vals_list.append(package_good_vals)
if package_good_vals_list:
val_df = pd.DataFrame(package_good_vals_list)
val_df.to_sql('cc_history_package_good', con=db_handle, if_exists='append', index=False)
origin_goods_ids = package_good_result_arr['id'].tolist()
return origin_goods_ids
try:
    pool = redis.ConnectionPool(**config.redis_options)
...
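One pattern worth flagging: the script builds its SQL "IN (...)" clauses with `'(%s)' % str(ids)[1:-1]`, i.e. by slicing the repr of a Python list. That only works for non-empty lists of plain integers; a small helper (a sketch, not part of the commit) makes those assumptions explicit:

def ids_clause(ids):
    # Render a non-empty list of integer ids as an SQL "IN (...)" clause body.
    # Callers must skip empty lists: "IN ()" is invalid SQL.
    assert ids, 'empty id list'
    return '(%s)' % ','.join(str(int(i)) for i in ids)

# usage: 'delete from ao_tt_api_log where id in %s' % ids_clause([3, 5, 8])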