logistics/物流渠道测算.ipynb

{
"cells": [
{
"cell_type": "markdown",
"metadata": {},
"source": [
"取订单包裹数据,计算它打单的费用,和理论最小费用"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"from utils.gtools import MySQLconnect\n",
"\n",
"# 读取需要计算的包裹信息\n",
"with MySQLconnect('ods') as db:\n",
" sql = r\"\"\" \n",
"WITH\n",
"t1 AS (\n",
"SELECT\n",
"order_id,\n",
"spu.`产品品类`,\n",
"spu.`产品分类`,\n",
"SUM(as_value_amount) AS 销售额,\n",
"sum(CASE WHEN opl.order_product_id LIKE '%\\_%' ESCAPE '\\\\' \n",
" AND opl.order_product_id NOT LIKE '%\\_%\\_%' ESCAPE '\\\\' THEN product_num END) AS product_num,\n",
"order_date,\n",
"count(opl.SKU) AS 产品数\n",
"FROM\n",
"dws.fact_order_product_list opl\n",
"LEFT JOIN stg_bayshop_litfad_sku sku ON opl.SKU=sku.SKU\n",
"LEFT JOIN stg_bayshop_litfad_spu spu ON sku.产品PID = spu.`产品PID`\n",
"WHERE\n",
" NOT EXISTS (\n",
" SELECT 1 \n",
" FROM dws.log_order_reissue_detail AS r \n",
" WHERE r.order_product_id = opl.order_product_id\n",
" )\n",
"\tAND order_date >= \"20250501\"\n",
"AND order_date < \"20250612\"\n",
"GROUP BY order_id\n",
")\n",
"-- ,\n",
"-- t2 AS (\n",
"SELECT\t\t\tt1.`产品品类`,\n",
"\t\t\t\t\t\tt1.`产品分类`,\n",
" t1.order_id,\n",
"\t\t\t\t\t\tt1.销售额,\n",
"\t\t\t\t\t\tt1.order_date,\n",
" a.包裹号,\n",
" a.快递公司,\n",
" a.运输方式,\n",
"\t\t\t\t\t\ta.`目的国`,\n",
" d.postcode,\n",
" b.length AS `长`,\n",
" b.width AS `宽`,\n",
" b.hight AS `高`,\n",
" b.weight AS `重量`\n",
" FROM\n",
"\t\t\t\tt1\n",
" LEFT JOIN order_express a ON t1.order_id = a.单号\n",
" JOIN package_vol_info b ON a.`包裹号` = b.package\n",
" JOIN order_list d ON a.`单号` = d.order_id \n",
" WHERE\n",
" a.`包裹状态` not IN ( '已作废') \n",
" AND b.hight > 0 \n",
" AND b.length > 0 \n",
" AND b.width > 0 \n",
" AND b.hight > 0 \n",
" AND b.weight > 0\n",
" AND a.`目的国` = \"United States\"\n",
"AND `快递公司` NOT REGEXP \"易可达|USPS|XMILES|WWEX|美西卡派|美东卡派|大包\"\n",
"AND `运输方式` REGEXP \"海运\"\n",
"AND a.`投递时间` >= '2025-05-17'\n",
"AND a.`投递时间` < '2025-06-12'\n",
"ORDER BY order_date\n",
"-- )\n",
"-- SELECT\n",
"-- `快递公司`,\n",
"-- count(*)\n",
"-- FROM\n",
"-- t2\n",
"-- GROUP BY 快递公司\n",
"-- \n",
"\n",
" \"\"\"\n",
" df=pd.read_sql(sql,db.con)\n",
" print(df)\n",
" df.to_clipboard(index=False)"
]
},
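{
"cell_type": "markdown",
"metadata": {},
"source": [
"The allocation further below splits an order's tail cost across its packages in proportion to their volumetric weight (体积重). If that column is not already present in the source data, here is a minimal sketch for deriving it from the queried dimensions; the divisor of 6000 cm^3 per kg is an assumption (carriers commonly bill against 5000 or 6000), and 计费重 is an illustrative extra column."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Sketch: derive volumetric weight (体积重) from the queried dimensions.\n",
"# ASSUMPTION: divisor of 6000 cm^3/kg; replace with the factor your carrier actually bills against.\n",
"VOL_DIVISOR = 6000\n",
"df['体积重'] = (df['长'] * df['宽'] * df['高'] / VOL_DIVISOR).round(2)\n",
"# Illustrative chargeable weight: the larger of volumetric and actual weight (重量)\n",
"df['计费重'] = df[['体积重', '重量']].max(axis=1)\n",
"df[['包裹号', '长', '宽', '高', '重量', '体积重', '计费重']].head()"
]
},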
{
"cell_type": "markdown",
"metadata": {},
"source": [
"计算实际物流渠道打单的预估费用,以订单为单位计算,然后价格给每个订单的每个包裹"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"\n",
"for index,row in df.iterrows():\n",
" if \"METRO-SAIR\" in row['快递公司']:\n",
" df.loc[index,'类型'] = \"Metro-SAIR\"\n",
" elif \"海MS-AMT-SAIR\" in row['快递公司']:\n",
" df.loc[index,'类型'] = \"AM-美西\"\n",
" elif \"海NY-AMT-SAIR\" in row['快递公司']:\n",
" df.loc[index,'类型'] = \"AM-美东\"\n",
" elif any(x in row['快递公司'] for x in [\"FEDEX-SAIR-G\", \"FEDEX-SAIR-H\", \"FEDEX\"]):\n",
" df.loc[index,'类型'] = \"快递\"\n",
"df"
]
},
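{
"cell_type": "markdown",
"metadata": {},
"source": [
"The channel tagging above can also be done without iterrows; an equivalent vectorized sketch using numpy.select, with the same substring conditions and labels as the loop:"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import numpy as np\n",
"\n",
"# Vectorized equivalent of the channel-type tagging loop above\n",
"carrier = df['快递公司'].fillna('')\n",
"conditions = [\n",
"    carrier.str.contains('METRO-SAIR'),\n",
"    carrier.str.contains('海MS-AMT-SAIR'),\n",
"    carrier.str.contains('海NY-AMT-SAIR'),\n",
"    carrier.str.contains('FEDEX'),\n",
"]\n",
"choices = ['Metro-SAIR', 'AM-美西', 'AM-美东', '快递']\n",
"df['类型'] = np.select(conditions, choices, default=None)\n",
"df['类型'].value_counts(dropna=False)"
]
},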
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"import pandas as pd\n",
"# 取数据源\n",
"df = pd.read_excel(r'C:\\Users\\Admin\\Desktop\\订单利润率分析1.1.xlsx', sheet_name='包裹详情')\n",
"df = df[df['尾端费用'] == None]\n",
"# 快递公司映射\n",
"company_mapping =pd.read_excel(r'C:\\Users\\Admin\\Desktop\\订单利润率分析1.1.xlsx', sheet_name='快递公司',usecols=[0, 1])\n",
"company_mapping = dict(zip(company_mapping['快递公司'], company_mapping['company']))\n",
"df"
]
},
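{
"cell_type": "markdown",
"metadata": {},
"source": [
"Before running the costing loop, it helps to check the mapping coverage: any carrier (快递公司) in the package sheet that is missing from company_mapping would raise a KeyError when the loop looks it up."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Carriers present in the data but absent from the 快递公司 mapping sheet\n",
"unmapped = set(df['快递公司'].dropna().unique()) - set(company_mapping)\n",
"if unmapped:\n",
"    print('Carriers without a company mapping:', unmapped)\n",
"else:\n",
"    print('All carriers are covered by company_mapping')"
]
},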
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from utils.countryOperator import OperateCountry\n",
"from utils.logisticsBill import BillFactory\n",
"from utils.Package import Package, Package_group\n",
"import pandas as pd\n",
"import json\n",
"import re\n",
"# 美国 \n",
"from utils.logisticsBill import Billing\n",
2025-07-16 00:05:12 +08:00
"# opCountry = OperateCountry('US')\n",
"df_grouped= df.groupby('快递跟踪号')\n",
2025-06-17 13:40:20 +08:00
"for order_num, group in df_grouped:\n",
" postcode = group['postcode'].iloc[0]\n",
" if pd.isna(postcode) or str(postcode).lower() == \"nan\":\n",
" continue\n",
"\n",
2025-07-16 00:05:12 +08:00
" packages= Package_group() # Metro-SAIR\n",
" company_name = company_mapping[group['快递公司'].iloc[0]]\n",
" if company_name == \"无\" or company_name == \"可补\":\n",
" continue\n",
" opCountry = OperateCountry(group['目的国'].iloc[0])\n",
" total_weight=0 # 按体积重分费用\n",
2025-06-17 13:40:20 +08:00
" for index,row in group.iterrows():\n",
2025-07-16 00:05:12 +08:00
" if row['length'] == 0 or row['width'] == 0 or row['hight'] == 0 or row['weight'] == 0:\n",
2025-06-17 13:40:20 +08:00
" continue\n",
2025-07-16 00:05:12 +08:00
" total_weight = row['体积重']\n",
" package = Package(row['包裹号'],row['length'],row['width'],row['hight'],row['weight'])\n",
" packages.add_package(package)\n",
" try:\n",
" bill1 = Billing(str(index),opCountry,packages,postcode,company_name=company_name,head_type=1,beizhu='1')\n",
" for index,row in group.iterrows():\n",
" propertion = row['体积重']/total_weight\n",
" df.loc[df['包裹号']==row['包裹号'],'尾端费用'] = bill1.tail_amount[0]*propertion\n",
" df.loc[df['快递跟踪号']==order_num,'账单详情'] = bill1.bill_dict()\n",
" except:\n",
" continue\n",
" print(index,bill1.bill_dict())\n"
]
},
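{
"cell_type": "markdown",
"metadata": {},
"source": [
"A quick sanity check of the allocation: aggregate the estimated tail cost (尾端费用) per carrier. Packages that were skipped (missing postcode, unmapped carrier, zero dimensions, billing errors) remain NaN and are excluded."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Summary of the allocated tail cost per carrier (快递公司)\n",
"summary = (\n",
"    df.dropna(subset=['尾端费用'])\n",
"      .groupby('快递公司')['尾端费用']\n",
"      .agg(['count', 'sum', 'mean'])\n",
"      .sort_values('sum', ascending=False)\n",
")\n",
"summary"
]
},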
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"df.to_excel(r'C:\\Users\\Admin\\Desktop\\订单利润率分析1.xlsx',index=False)"
]
},
{
"cell_type": "markdown",
"metadata": {},
"source": [
"计算最优渠道费用"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"from utils.countryOperator import OperateCountry\n",
"from utils.logisticsBill import BillFactory\n",
"from utils.Package import Package, Package_group\n",
"import pandas as pd\n",
"import json\n",
"import re\n",
"# 美国 \n",
"from utils.logisticsBill import Billing\n",
"import requests\n",
"opCountry = OperateCountry('US')\n",
"for order_num, group in df_grouped:\n",
" postcode = group['postcode'].iloc[0]\n",
" if pd.isna(postcode) or str(postcode).lower() == \"nan\":\n",
" continue\n",
" \n",
" \n",
" if \"海运\" in row['运输方式']:\n",
" head_type = 1\n",
" else:\n",
" head_type = 0\n",
" # 包裹打包\n",
" packages = Package_group()\n",
" for index,row in group.iterrows():\n",
" package = Package(row['包裹号'],row['长'],row['宽'],row['高'],row['重量'])\n",
" packages.add_package(package)\n",
" if packages is None:\n",
" continue\n",
"\n",
" bill = Billing(str(order_num),opCountry,packages,postcode,company_name=None,head_type=head_type,beizhu='1')\n",
" head_price = bill.head_amount[0]\n",
" tail_price = bill.tail_amount[0]\n",
" if \"USPS\" in row['快递公司']:\n",
" tail_price = tail_price/2\n",
" df.loc[df['order_id']==order_num,'最优尾端费用'] = tail_price/len(packages)\n",
" # df.loc[index,\"尾端货币\"] = bill.tail_amount[1]\n",
" df.loc[df['order_id']==order_num,'最优渠道'] = bill.company_name\n",
" print(f\"order_num:{order_num},最优尾端费用:{tail_price/len(packages)},最优渠道:{bill.company_name}\")\n",
"df.to_clipboard(index=False)\n",
" "
]
},
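{
"cell_type": "markdown",
"metadata": {},
"source": [
"A rough comparison sketch of the actual versus theoretical minimum tail cost, assuming both 尾端费用 and 最优尾端费用 have been filled in above; 可节省 is an illustrative column name for the per-package difference."
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"# Potential savings per package: actual tail cost minus the theoretical optimum\n",
"cmp_df = df.dropna(subset=['尾端费用', '最优尾端费用']).copy()\n",
"cmp_df['可节省'] = cmp_df['尾端费用'] - cmp_df['最优尾端费用']\n",
"print('潜在节省合计:', round(cmp_df['可节省'].sum(), 2))\n",
"cmp_df.groupby('最优渠道')['可节省'].agg(['count', 'sum']).sort_values('sum', ascending=False)"
]
},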
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"df.to_clipboard(index=False)"
]
}
],
"metadata": {
"kernelspec": {
"display_name": "base",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.11.5"
}
},
"nbformat": 4,
"nbformat_minor": 2
}