logistics/自动化跟单.ipynb

6.4 KiB

In [ ]:
# Notebook setup: pull a fresh login cookie from the project-local helper
# and build the request headers used by every scrape below.
import pandas as pd 
import requests 
from login_for_cookie import Vc
import pendulum
import json
from utils.gtools import MySQLconnect 
from lxml import etree
from pathlib import Path
from  datetime import datetime
# Vc() returns a cookie string for the internal ERP (cp.maso.hk);
# `headers` is reused as a module-level global by the cells below.
cookie = Vc()
headers = {"Cookie":cookie}
In [ ]:
def get_fs(row):
    """Fetch shipping-deadline and trade info for one purchase order.

    Scrapes three internal ERP pages (cp.maso.hk) using the module-level
    ``headers`` cookie and returns a pandas Series of 8 values:
    [trade_id, order ship-days, maintain ship-days, pay time,
     latest ship time, trade platform, trade no, platform order no].

    Parameters
    ----------
    row : pandas.Series
        A DataFrame row; must contain a "采购单号" (purchase-order id) field.

    Notes
    -----
    All table parsing is positional (``pd.read_html`` + fixed iloc
    offsets) and therefore tightly coupled to the ERP's current HTML
    layout — any page change will silently shift values.
    """
    data = row["采购单号"]
    # Purchase-receipt search endpoint; queried via a full form POST.
    buy_url = "http://cp.maso.hk/index.php?main=store_in_receive"

    # Default search-form payload; most fields are the form's "any"
    # sentinels (-1 / 0 / ""). Only s_pid is actually varied below.
    payload =  {
    "s_pid": 2867159,
    "s_suborderid": "",
    "pdt_standard_id": "",
    "s_store": -1,
    "s_status[0]": 0,
    "s_sort": "desc",
    "Submit": "查询",
    "synergy_sign": -1,
    "start_dep": "",
    "start_user": "",
    "to_dep": "",
    "to_user": "",
    "swebid": 0,
    "local_pdt_id": 0,
    "sadd_time": "",
    "sadd_time_end": "",
    "sreal_express_no": "",
    "sweb_id": "",
    "sof": 0,
    "soe": 0,
    "sod": "",
    "tsid": -1,
    "tsdid": -1,
    "tsst": -1,
    "order_cate": -1,
    "error_status": -1,
    "web_uid": -1,
    "team_id": -1,
    "handle": -1,
    "handle_uid": 0,
    "handle_time_start": "",
    "handle_time_end": "",
    "shiping_time_start": "",
    "shiping_time_end": "",
    "trade_time_start": "",
    "trade_time_end": "",
    "order_express_fee": -1,
    "web_type": -1,
    "shipping_type": -1,
    "maintain_add_time_s": "",
    "maintain_add_time_e": "",
    "maintain_comfirm_time_s": "",
    "maintain_comfirm_time_e": "",
    "dispense_user_id": "",
    "export_page": 1
    }
    # Replace the placeholder purchase-order id with this row's value.
    payload["s_pid"] = data
    resp = requests.post(url = buy_url ,headers=headers , data=payload)
    # The result table is identified by its "所属网站ID" header text;
    # index 1 and the row-0-as-header fixup match the page's layout.
    buy_df_list = pd.read_html(resp.text,match="所属网站ID")
    buy_df = buy_df_list[1]
    buy_df.columns = buy_df.loc[0,:]
    trade_id = buy_df["交易ID"][1]
    # Trade-detail page: ship-time commitments and pay time.
    trade_url = f"http://cp.maso.hk/index.php?main=store_tradelist_info&trade_id={trade_id}"
    resp2 = requests.get(url = trade_url ,headers=headers )
    df_list2 = pd.read_html(resp2.text,match="下单发货时间")
    df2 = df_list2[0]
    df2.columns = df2.loc[0,:]
    # "--" marks an unset value and is mapped to 0 days.
    # NOTE(review): the `[0]` here takes only the FIRST CHARACTER of the
    # cell before the replace (a 1-char string can never contain "--"),
    # which is asymmetric with the next line — verify against real page
    # data whether this is intentional or a bug.
    下单发货时间 = int(df2.iloc[2,1][0].replace("--","0"))
    维护发货时间 = int(df2.iloc[2,3].replace("--","0"))
    支付时间 = pendulum.parse(df2.iloc[3,3])
    # Latest allowed ship time = pay time + the stricter (larger) of the
    # two day commitments.
    最大发货时间 = 支付时间.add(days=max(下单发货时间,维护发货时间))
    # Trade-list page: platform / trade-number columns.
    trade_list_url = f"http://cp.maso.hk/index.php?main=store_in_receive&navlist=trade_list&s_trade_id={trade_id}"
    resp3 = requests.get(url = trade_list_url ,headers=headers )
    df_list3 = pd.read_html(resp3.text,match="涨幅")
    df3 = df_list3[0]
    df3.columns = df3.loc[0,:]
    交易平台 = df3["交易平台"][1]
    交易号 = df3["交易号"][1]
    交易平台订单号 = df3["交易平台订单号"][1]
    
    return pd.Series([trade_id,下单发货时间, 维护发货时间,支付时间,最大发货时间,交易平台,交易号,交易平台订单号])
In [ ]:
# Ad-hoc cell: fetch a single order's detail page into `resp`.
# NOTE(review): `order_id` is not defined anywhere in this notebook — the
# cell only works after it has been set manually in the live session.
url = f"https://cp.maso.hk/index.php?main=orderlist_info&id={order_id}"
resp = requests.get(url = url ,headers=headers)
In [ ]:
import pandas as pd 
# Parse the "基础估算" (base-estimate) table out of `resp` from the
# previous cell; table index 1 is the one of interest on that page.
df_list = pd.read_html(resp.text,match="基础估算")
goaldf = df_list[1]
In [ ]:
# Source worksheet with the orders to process (hard-coded local path —
# presumably dropped in via WeChat Work; update per run).
df = pd.read_excel(r"F:\DOCUMENTS\WXWork\1688854527635889\Cache\File\2025-05\新建Microsoft Excel 工作表 (2)(1).xlsx",sheet_name="Sheet5")
In [ ]:
import pandas as pd 

def a(order_id, package, rate=7):
    """Look up the tail-leg fee for *package* on order *order_id*.

    Scrapes the order-detail page, locates the "基础估算" (base-estimate)
    table, finds the row whose column 1 matches *package*, and sums the
    two fee figures found in column 5 of the row below it, divided by
    *rate*.

    Parameters
    ----------
    order_id : object
        The ERP order id, interpolated into the detail-page URL.
    package : object
        Package number; compared to the table column as strings.
    rate : float, optional
        Divisor applied to the summed fees (default 7 — presumably a
        hard-coded CNY/USD exchange rate; TODO confirm).

    Returns
    -------
    float
        The converted fee, rounded to 2 decimal places. If several rows
        match, the last match wins (original behavior).

    Raises
    ------
    ValueError
        If no row of the table matches *package*. (The original code hit
        an UnboundLocalError on `fee` in this case.)
    """
    url = f"https://cp.maso.hk/index.php?main=orderlist_info&id={order_id}"
    resp = requests.get(url = url ,headers=headers)
    df_list = pd.read_html(resp.text,match="基础估算")
    goaldf = df_list[1]
    fee = None
    for idx,value in goaldf[1].items():
        if str(package) == str(value):
            # The fee cell sits one row BELOW the matched package row, in
            # column 5; it holds several space-separated figures of which
            # positions 1 and 2 are the ones summed (thousands commas
            # stripped before float conversion).
            fees = goaldf[5][idx+1]
            fee_list= fees.split('  ')
            fee1 = fee_list[1]
            fee2 = fee_list[2]
            print(fees)
            fee = (float(fee1.replace(',','')) + float(fee2.replace(',','')))/rate
            print(fee1,fee2,fee)
    if fee is None:
        raise ValueError(f"package {package!r} not found on order {order_id}")
    return round(fee,2)
In [ ]:
# Fill the "尾端" (tail-leg fee) column for every row shipped via the
# "海MS-WWEX-SAIR" carrier; all other rows are left untouched.
for index,row in df.iterrows():
    if row['快递公司'] =="海MS-WWEX-SAIR":
        df.at[index,'尾端'] = a(row["order_id"],row["包裹号"])
    else:
        continue
# Copy the result table to the clipboard for pasting back into Excel.
df.to_clipboard(index=False)