diff --git a/cal_sell_price.ipynb b/cal_sell_price.ipynb index 6845522..ddfb5bd 100644 --- a/cal_sell_price.ipynb +++ b/cal_sell_price.ipynb @@ -2,7 +2,7 @@ "cells": [ { "cell_type": "code", - "execution_count": 2, + "execution_count": null, "metadata": {}, "outputs": [], "source": [ @@ -26,102 +26,9 @@ }, { "cell_type": "code", - "execution_count": 3, + "execution_count": null, "metadata": {}, - "outputs": [ [removed cell outputs truncated: five repeated pandas UserWarning blocks ("pandas only supports SQLAlchemy connectable ... Please consider using SQLAlchemy", raised at df = pd.read_sql(sql, conn)), progress lines 日期:2024-01-01 through 日期:2024-04-01 "售价计算完成", and a KeyboardInterrupt traceback propagating from df1 = cal_sell_price(df) through call_sell_and_order_price (sell_price.py:120) into us_ocean_order_price, interrupted at the per-row fee lookup base_fee += df1[df1['g'] >= lbs_weight]['费用'].iloc[0] inside pandas boolean-indexing internals (DataFrame.__getitem__ -> take_nd); the traceback's final frame and the output block's closing brackets follow]
\u001b[38;5;28;01mif\u001b[39;00m flip_order:\n\u001b[0;32m 165\u001b[0m out \u001b[38;5;241m=\u001b[39m out\u001b[38;5;241m.\u001b[39mT\n", - "\u001b[1;31mKeyboardInterrupt\u001b[0m: " - ] - } - ], + "outputs": [], "source": [ "# 计算售价\n", "\n", @@ -171,6 +78,1432 @@ " print(f\"日期:{date}, 售价计算完成\")\n", "\n" ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "美国2024版订单物流费\n" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "\n", + "\n", + "import math\n", + "\n", + "import pandas as pd\n", + "\n", + "from utils.gtools import MySQLconnect\n", + "mat = MySQLconnect('mat')\n", + "engine = mat.engine()\n", + "df1 = pd.read_sql('select * from fedex2_0814', engine)\n", + "import math\n", + "# 美国海运订单费用,返回单个sku的订单费用和订单类型\n", + "def us_ocean_order_price(packages):\n", + " # 处理包裹\n", + " if packages is None or len(packages) == 0:\n", + " return 0, '包裹数为0'\n", + " \n", + " order_fee = 0\n", + " base_fee = 0\n", + " other_fee = 0\n", + " order_type1 = '' # 快递订单类型\n", + " # 快递费用\n", + " for package in packages:\n", + " lbs_weight = package.get_volume_weight(8.5)\n", + " if package.weight > 67000 or package.fst_size > 264 or lbs_weight > 67950 or package.girth > 391:\n", + " base_fee = 999999\n", + " break\n", + " base_fee += df1[df1['g'] >= lbs_weight]['费用'].iloc[0]\n", + " if package.fst_size >= 116 or package.sed_size >= 71 or package.girth >= 251:\n", + " other_fee += 16.3\n", + " order_type1 += '超长' # 超长费\n", + " if package.weight >= 21000 and package.fst_size < 238 and package.girth < 315:\n", + " other_fee += 25.5\n", + " order_type1 += '超重' # 超重费:\n", + " if package.fst_size >= 238 or package.girth >= 315:\n", + " other_fee += 118.7\n", + " order_type1 += '大包裹费' # 大包裹费\n", + " express_fee = base_fee + other_fee\n", + " \n", + " # 卡派(步长为3)\n", + " ltl_base = 0\n", + " ltl_fee = 0\n", + " count1 = 0\n", + " count2 = 0\n", + " count3 = 0\n", + " count4 = 0\n", + " order_type2 = '卡派'\n", + " order_other_type1 = ''\n", + " order_other_type2 = ''\n", + " order_other_type3 = ''\n", + " order_other_type4 = ''\n", + " order_ltl_oversize = 0\n", + " order_ltl_overweight1 = 0\n", + " order_ltl_overweight2 = 0\n", + " order_ltl_overpackage = 0\n", + " sku_total_cubic_feet = 0\n", + " for package in packages:\n", + " cubic_feet= package.length * package.width * package.height / 1000000 * 35.3\n", + " sku_total_cubic_feet += cubic_feet\n", + " # 卡派额外费用\n", + " if package.fst_size>= 250:\n", + " count1 += 1\n", + " order_ltl_oversize = 118\n", + " order_other_type1 = '超长'\n", + " if package.weight >= 111000:\n", + " count2 += 1\n", + " order_ltl_overweight1 = 78\n", + " order_other_type2 = '超重'\n", + " if package.weight >= 130000:\n", + " count3 += 1\n", + " order_ltl_overweight2 = 30\n", + " order_other_type3 = '超重'\n", + " if package.fst_size >= 310:\n", + " count4 += 1\n", + " order_ltl_overpackage = 30\n", + " order_other_type4 = '大包裹'\n", + " order_type2 += order_other_type3 + order_other_type1 + order_other_type2 + order_other_type4\n", + "\n", + " # 卡派基础费用 体积/1000000 *35.3\n", + " if sku_total_cubic_feet < 25:\n", + " ltl_base = round(163 / 0.45 / 2, 2) # 181.11\n", + "\n", + " elif sku_total_cubic_feet < 35:\n", + " ltl_base = round(180 / 0.45 / 2, 2) # 200\n", + " else:\n", + "\t # 大于一个立方的(35立方英尺) 按照每立方英尺*5美金\n", + " # 最低为190美金\n", + " ltl_base = round(max(190, 5 * sku_total_cubic_feet) / 0.45 / 2)\n", + "\n", + " \n", + " ltl_fee = math.ceil(count1 / 3) * order_ltl_oversize + math.ceil(count2 / 3) * 
order_ltl_overweight1 + math.ceil(\n", + " count3 / 3) * order_ltl_overweight2 + math.ceil(count4 / 3) * order_ltl_overpackage + ltl_base\n", + "\n", + " if ltl_fee < express_fee:\n", + " order_fee = ltl_fee\n", + " order_type = order_type2\n", + " else:\n", + " order_fee = express_fee\n", + " order_type = order_type1\n", + " return order_fee, order_type\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "美国2024版订单物流费" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [], + "source": [ + "# 英国海运订单费用,返回单个sku的订单费用和订单类型\n", + "def uk_ocean_order_price(packages):\n", + " # 计算uk经济直达费用\n", + " order_fee = 0\n", + " express_fee = 0\n", + " order_type1 = '' # 订单类型\n", + " ltl_fee = 0\n", + " if packages is None or len(packages) == 0:\n", + " return 0, '包裹数为0'\n", + " num = len(packages)\n", + " if num > 30:\n", + " return 0, '包裹数超过30'\n", + " for package in packages:\n", + " base_fee = 0\n", + " other_fee1 = 0\n", + " other_fee2 = 0\n", + " girth = package.girth\n", + " if package.fst_size <= 90 and package.sed_size <= 50 and package.trd_size <= 50 and package.weight <= 29000:\n", + " base_fee = 2.5\n", + " elif package.fst_size <= 165 and package.weight <= 39000:\n", + " base_fee = 4.5\n", + " if package.weight >= 29000:\n", + " other_fee1 = 17.8 # 超重费\n", + " order_type1 += '超重'\n", + " if package.fst_size > 95 or package.sed_size > 55 or package.trd_size > 55:\n", + " other_fee2 = 12.7 # 超长费\n", + " order_type1 += '超长'\n", + " elif package.fst_size <= 290 and package.weight <= 69000 and girth <= 410:\n", + " if package.weight <= 29000:\n", + " base_fee = (7 * 9 / 7) / 0.45\n", + " elif 29000 < package.weight <= 49000:\n", + " base_fee = (17.5 * 9 / 7) / 0.45\n", + " elif 49000 < package.weight <= 69000:\n", + " base_fee = (28 * 9 / 7) / 0.45\n", + " order_type1 += '大包裹'\n", + " else:\n", + " base_fee = 999999\n", + " express_fee += (base_fee + other_fee1 + other_fee2)\n", + " express_fee = round(express_fee, 2)\n", + "\n", + " # 卡派 主计费实重,辅计费抛重\n", + " order_type2 = '卡派'\n", + " sku_total_cubic_feet = 0\n", + " for package in packages:\n", + " cubic_feet= package.length * package.width * package.height / 6000\n", + " sku_total_cubic_feet += cubic_feet\n", + " if package.length >310:\n", + " return 999999,'包裹超尺寸'\n", + " ltl_fee = max(151/0.45 - 2.4 /7 * sku_total_cubic_feet,2.5) \n", + "\n", + " if express_fee <= ltl_fee:\n", + " order_fee = express_fee\n", + " order_type = order_type1\n", + " else:\n", + " order_fee = ltl_fee\n", + " order_type = order_type2\n", + " return round(order_fee,2), order_type\n" + ] + }, + { + "cell_type": "markdown", + "metadata": {}, + "source": [ + "计算2024版本售价" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [], + "source": [ + "from utils.Package import Package, Package_group\n", + "from sell.base_sell_price import SellPriceBase\n", + "import re\n", + "import json\n", + "def call_sell_price(price, packages):\n", + " if packages is None:\n", + " return 0\n", + " litfad = SellPriceBase.litfad(packages, price,1)\n", + " # 修改版本,网站售价\n", + " sell_price = litfad.cal_sell_price()\n", + " return sell_price" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": {}, + "outputs": [ + { + "name": "stderr", + "output_type": "stream", + "text": [ + " 0%| | 0/40 [00:00= %s \n", + " AND id <= %s\n", + " AND `状态` = '启用'\"\"\"\n", + " \n", + " with MySQLconnect('ads') as db:\n", + " for i in tqdm(range(110,150)):\n", + " print(i,\"开始\")\n", + " dfsql = 
sql % (i*100000, (i+1)*100000-1)\n", + " df = pd.read_sql(dfsql, db.engine())\n", + " if len(df) == 0:\n", + " continue\n", + " # 包裹数据格式化\n", + " df['erp_package_vol'] = df.apply(lambda x: to_package(x['erp_package_vol']), axis=1)\n", + " # df['售价'] = df.apply(lambda x: call_sell_and_order_price(x['成本价'], x['erp_package_vol'], \"海运\")[0], axis=1)\n", + " # df[['美国海运','美国海运类型']] = df.apply(lambda x: us_ocean_order_price(x['erp_package_vol']), axis=1, result_type='expand')\n", + " # df[['英国海运','英国海运类型']] = df.apply(lambda x: uk_ocean_order_price(x['erp_package_vol']), axis=1, result_type='expand')\n", + " \n", + " def process_row(x):\n", + " # 单行处理逻辑\n", + " try:\n", + "\n", + " sell_price = call_sell_price(x['成本价'], x['erp_package_vol'])\n", + " # print(\"售价计算完成\")\n", + " us_price, us_type = us_ocean_order_price(x['erp_package_vol'])\n", + " # print(\"美国海运计算完成\")\n", + " uk_price, uk_type = uk_ocean_order_price(x['erp_package_vol'])\n", + " # print(\"英国海运计算完成\")\n", + " return [round(sell_price, 2), us_price, us_type, uk_price, uk_type]\n", + " except Exception as e:\n", + " return [None, None, None, None, None]\n", + "\n", + " # 用 dict 更稳妥\n", + " rows = df.to_dict(orient='records') # 转为 list of dict\n", + " print(\"到这儿了\")\n", + " with ThreadPoolExecutor(max_workers=50) as executor:\n", + " results = list(executor.map(process_row, rows))\n", + " # 组装结果\n", + " print(\"到这儿了\")\n", + " result_df = pd.DataFrame(results, columns=['售价', '美国海运', '美国海运类型', '英国海运', '英国海运类型'])\n", + " df = pd.concat([df.reset_index(drop=True), result_df], axis=1)\n", + "\n", + " # 清空临时表\n", + " db.cur.execute(\"TRUNCATE TABLE ads.temp_sku_order_compare;\")\n", + " print(\"临时表清空完成\")\n", + "\n", + " # 组装需要输出的字段\n", + " df = df.rename(columns={\n", + " 'erp_package_vol':'包裹数据'\n", + " })\n", + "\n", + " columns_needed = ['SKU', '售价','包裹数据', '美国海运', '美国海运类型', '英国海运', '英国海运类型']\n", + " df_out = df[columns_needed]\n", + " # 写入当前批次数据\n", + " df_out.to_sql(\n", + " \"temp_sku_order_compare\",\n", + " db.eng,\n", + " if_exists='append',\n", + " index=False,\n", + " method='multi',\n", + " chunksize=500 # 分批写入\n", + " )\n", + "\n", + " print(\"当前批次数据写入完成\")\n", + " # 更新主表\n", + " update_sql = \"\"\"\n", + " REPLACE INTO ads.sku_order_compare SELECT * FROM ads.temp_sku_order_compare\n", + " \"\"\"\n", + " db.cur.execute(update_sql)\n", + " print(\"主表更新完成\")\n", + " db.con.commit()\n", + " print(i,\"结束\")" + ] } ], "metadata": { diff --git a/data/售价尾端价格.xlsx b/data/售价尾端价格.xlsx index 38f6ef5..d281753 100644 --- a/data/售价尾端价格.xlsx +++ b/data/售价尾端价格.xlsx @@ -1,3 +1,3 @@ version https://git-lfs.github.com/spec/v1 -oid sha256:113c7466854323029ab62c4a5dfb25858b37efd7a793d97724070a45f1f65628 -size 17380 +oid sha256:d43ece174359d76f917360321e087fb1a1c74145f5c4100fe287e34598ab959c +size 27737 diff --git a/logisticsClass/logisticsBaseClass.py b/logisticsClass/logisticsBaseClass.py index b87048e..d90522e 100644 --- a/logisticsClass/logisticsBaseClass.py +++ b/logisticsClass/logisticsBaseClass.py @@ -3,7 +3,7 @@ from enum import Enum class LogisticsType(Enum): EXPRESS = '快递' - COURIER = '卡派' + LTL = '卡派' OCEAN = '海运' AIR = '空运' class PortType(Enum): diff --git a/logisticsClass/logisticsTail_EUR.py b/logisticsClass/logisticsTail_EUR.py index 01dc775..a82b2cf 100644 --- a/logisticsClass/logisticsTail_EUR.py +++ b/logisticsClass/logisticsTail_EUR.py @@ -239,7 +239,7 @@ class KPASLLogistics(TailLogistics): """卡派—ASL""" company = "卡派-ASL" # 欧洲国家的卡派 currency = "EUR" - logistics_type = LogisticsType.COURIER + logistics_type = 
LogisticsType.LTL parent_current_directory = Path(__file__).parent.parent price_path = parent_current_directory.joinpath("data") @@ -304,7 +304,7 @@ class KPGELLogistics(TailLogistics): """ company = "卡派-GEL" # 欧洲国家的卡派 currency = "EUR" - logistics_type = LogisticsType.COURIER + logistics_type = LogisticsType.LTL parent_current_directory = Path(__file__).parent.parent price_path = parent_current_directory.joinpath("data") diff --git a/logisticsClass/logisticsTail_UK.py b/logisticsClass/logisticsTail_UK.py index aa46d8e..0bc3cad 100644 --- a/logisticsClass/logisticsTail_UK.py +++ b/logisticsClass/logisticsTail_UK.py @@ -152,7 +152,7 @@ class KPZGLogistics_UK(TailLogistics): country = 'United Kingdom' company = '海GB-卡派' currency = 'GBP' - logistics_type = LogisticsType.COURIER + logistics_type = LogisticsType.LTL def __init__(self): super().__init__() self.base_dice = { @@ -218,7 +218,7 @@ class KPNVlogistics_UK(TailLogistics): country = 'United Kingdom' company = '卡派-NV' currency = 'GBP' - logistics_type = LogisticsType.COURIER + logistics_type = LogisticsType.LTL parent_current_directory = Path(__file__).parent.parent price_path = parent_current_directory.joinpath("data") @@ -272,7 +272,7 @@ class KPNVlogistics_UK(TailLogistics): # country = 'United Kingdom' # company = 'DX-EL' # currency = 'GBP' -# logistics_type = LogisticsType.COURIER +# logistics_type = LogisticsType.LTL # def __init__(self): # super().__init__() # self.base_fee = 0 diff --git a/logisticsClass/logisticsTail_US.py b/logisticsClass/logisticsTail_US.py index 96124d8..382860a 100644 --- a/logisticsClass/logisticsTail_US.py +++ b/logisticsClass/logisticsTail_US.py @@ -362,7 +362,7 @@ class GIGALogistics_US(TailLogistics): country = "United States" country_code = "US" company = "大健-GIGA" - logistics_type = LogisticsType.COURIER + logistics_type = LogisticsType.LTL parent_current_directory = Path(__file__).parent.parent data_path = parent_current_directory.joinpath("data") @@ -410,7 +410,7 @@ class CEVALogistics_US(TailLogistics): country = "United States" country_code = "US" company = "大健-CEVA" - logistics_type = LogisticsType.COURIER + logistics_type = LogisticsType.LTL parent_current_directory = Path(__file__).parent.parent data_path = parent_current_directory.joinpath("data") @@ -503,7 +503,7 @@ class MetroLogistics_US(TailLogistics): country = "United States" country_code = "US" company = "Metro-SAIR" - logistics_type = LogisticsType.COURIER + logistics_type = LogisticsType.LTL parent_current_directory = Path(__file__).parent.parent data_path = parent_current_directory.joinpath("data") @@ -617,7 +617,7 @@ class XmilesLogistics_US(TailLogistics): country = "United States" country_code = "US" company = "XMILES-SAIR" - logistics_type = LogisticsType.COURIER + logistics_type = LogisticsType.LTL parent_current_directory = Path(__file__).parent.parent data_path = parent_current_directory.joinpath("data") @@ -700,7 +700,7 @@ class AMWestLogistics_US(TailLogistics): country = "United States" country_code = "US" company = "AM-美西" - logistics_type = LogisticsType.COURIER + logistics_type = LogisticsType.LTL parent_current_directory = Path(__file__).parent.parent data_path = parent_current_directory.joinpath("data") @@ -779,7 +779,7 @@ class AMEastLogistics_US(TailLogistics): country = "United States" country_code = "US" company = "AM-美东" - logistics_type = LogisticsType.COURIER + logistics_type = LogisticsType.LTL parent_current_directory = Path(__file__).parent.parent data_path = parent_current_directory.joinpath("data") diff 
--git a/sell/base_sell_price.py b/sell/base_sell_price.py index 14b1c9a..8ae0d0b 100644 --- a/sell/base_sell_price.py +++ b/sell/base_sell_price.py @@ -65,10 +65,13 @@ class SellPriceBase: price_path = parent_current_directory.joinpath("data") _price_files = price_path.joinpath("售价尾端价格.xlsx") df_2025 = None + df_2024 = None def __new__(cls, *args, **kwargs): """实现单例模式,只加载一次文件""" + if cls.df_2024 is None: + cls.df_2024 = pd.read_excel(cls._price_files,sheet_name="2024") if cls.df_2025 is None: - cls.df_2025 = pd.read_excel(cls._price_files,sheet_name="Sheet1") + cls.df_2025 = pd.read_excel(cls._price_files,sheet_name="2025") return super().__new__(cls) def __init__(self, packages, purchase_price, shipping_type, ocean_first_cny, ocean_first_usd, air_first_usd, air_cny_type, air_first_fix, exchange_rate, profit_rate, air_rate,tax_rate): @@ -85,26 +88,35 @@ class SellPriceBase: self.shipping_type = shipping_type # 1为海运0为空运,由spu定好的 self.tax_rate = tax_rate # 税率 - # 获取对应价格表 - def get_fee(self, head_type): - mat = MySQLconnect('mat') - engine = mat.engine() + + # def get_fee_df(self): + # if self.df_status == 0: + # self.df_status = 1 + # self.mat = MySQLconnect('mat') + # self.engine = self.mat.engine() + # self.adf = pd.read_sql('SELECT * FROM `usps_0814`', self.engine) + # self.bdf = pd.read_sql('SELECT * FROM `uandf_0814`', self.engine) + # self.cdf = pd.read_sql('SELECT * FROM `fedex_2504`', self.engine) + # # 获取对应价格表 + # def get_fee(self, head_type,adf,bdf,cdf): + # mat = MySQLconnect('mat') + # engine = mat.engine() - try: - if head_type == 0: - df = pd.read_sql('SELECT * FROM `usps_0814`', engine) - elif head_type == 1: - df = pd.read_sql('SELECT * FROM `uandf_0814`', engine) - elif head_type == 2: - df = pd.read_sql('SELECT * FROM `fedex_2504`', engine) - else: - df = pd.DataFrame([99999], columns=['错误']) - except Exception as e: - print(f"发生错误: {e}") - df = pd.DataFrame() - finally: - engine.dispose() - return df + # try: + # if head_type == 0: + # df = self.adf + # elif head_type == 1: + # df = self.bdf + # elif head_type == 2: + # df = self.cdf + # else: + # df = pd.DataFrame([99999], columns=['错误']) + # except Exception as e: + # print(f"发生错误: {e}") + # df = pd.DataFrame() + # finally: + # engine.dispose() + # return df # 计算快递费用 def cal_express_fee(self): @@ -136,22 +148,19 @@ class SellPriceBase: ahs_dimension = 0 if head_type == 0: try: - df = self.get_fee(head_type) - express_base_fee = df[df['oz'] == oz_weight]['最终费用'].iloc[0] / self.profit_rate + express_base_fee = self.df_2024[self.df_2024['oz'] == oz_weight]['最终费用'].iloc[0] / self.profit_rate except: head_type = 1 # USPSA2/FEDEXA1 if head_type == 1: try: - df = self.get_fee(head_type) - express_base_fee = df[df['lbs'] == lbs_weight]['加权价格'].iloc[0] / self.profit_rate + express_base_fee = self.df_2024[self.df_2024['lbs小'] == lbs_weight]['加权价格'].iloc[0] / self.profit_rate except: head_type = 2 # FEDEX if head_type == 2: try: - df = self.get_fee(head_type) - express_base_fee = df[df['lbs'] == lbs_weight]['售价尾端价格'].iloc[0] + express_base_fee = self.df_2024[self.df_2024['lbs大'] == lbs_weight]['售价尾端价格'].iloc[0] except: express_base_fee = 99999 head_type = 3 @@ -358,6 +367,22 @@ class SellPriceBase: @classmethod def litfad(cls, packages, purchase_price, shipping_type): + return cls( + packages, # 单sku包裹数据 + purchase_price, # 采购价/成本价 + shipping_type, # 1为海运0为空运,由spu定好的 + ocean_first_cny=1.077, #1.077, # 海运头程单价CNY + ocean_first_usd=1.06, #1.06, # 海运头程单价USD + air_first_usd=0.65, # 空运头程单价USD + air_cny_type=0.093, # 空运头程货型单价CNY + 
air_first_fix=27.7, # 空运头程固定单价CNY + exchange_rate=7, # 汇率 + profit_rate=0.45, #0.45, # 利润系数 + air_rate=0.7, # 空运分配占比 + tax_rate = 0 # 税率 + ) + @classmethod + def litfad_2025(cls, packages, purchase_price, shipping_type): return cls( packages, # 单sku包裹数据 purchase_price, # 采购价/成本价 diff --git a/sell/logistic_price/de_price.py b/sell/logistic_price/de_price.py new file mode 100644 index 0000000..e69de29 diff --git a/sell/logistic_price/uk_price.py b/sell/logistic_price/uk_price.py index 7a998b0..2c4caf8 100644 --- a/sell/logistic_price/uk_price.py +++ b/sell/logistic_price/uk_price.py @@ -20,7 +20,7 @@ def uk_ocean_order_price(packages,k): base_fee = 3.7/0.359*1.3-k-0.8*package.get_volume_weight(6000) else: base_fee = 999999 - if package.fst_size >=100 and package.sed_size >=60 and package.weight >=30000: + if package.fst_size >=100 or package.sed_size >=60 or package.weight >=30000: other_fee1 =45 order_type1 += '大包裹' diff --git a/sell/logistic_price/us_price.py b/sell/logistic_price/us_price.py index 36689e1..a1aab6c 100644 --- a/sell/logistic_price/us_price.py +++ b/sell/logistic_price/us_price.py @@ -19,7 +19,7 @@ def ocean_order_price(packages): express_type_length = '' for package in packages: for key, value in ocean_price_dict.items(): - if package.weight <=key: + if max(package.get_volume_weight(8.5), package.weight) <=key: express_fee+=value break if package.fst_size>=116 or package.sed_size>=71 or package.girth>=251: diff --git a/sell/sell_price.py b/sell/sell_price.py index 159dea4..c365161 100644 --- a/sell/sell_price.py +++ b/sell/sell_price.py @@ -1,3 +1,4 @@ +import json import math import sys sys.path.append(r'D:\test\logistics\sell') @@ -26,7 +27,7 @@ def ocean_order_price(packages): express_type_length = '' for package in packages: for key, value in ocean_price_dict.items(): - if package.weight <=key: + if max(package.get_volume_weight(8500)*1000, package.weight) <=key: express_fee+=value break if package.fst_size>=116 or package.sed_size>=71 or package.girth>=251: @@ -148,7 +149,7 @@ def call_sell_and_order_price(price, package_dict,head_type): # 提取字符串中的第一个数字 match = re.search(r"[-+]?\d*\.\d+|\d+", str(value)) return float(match.group()) if match else 0.0 - + package_dict = json.loads(package_dict) for key, package in package_dict.items(): package['长'] = extract_number(package['长']) package['宽'] = extract_number(package['宽']) @@ -163,7 +164,7 @@ def call_sell_and_order_price(price, package_dict,head_type): return (0,0,0,0),0,0 litfad = SellPriceBase.litfad(packages, price,1) # 修改版本,网站售价 - sell_price = litfad.cal_sell_price_2025() + sell_price = litfad.cal_sell_price() # 订单物流费 if "海" in head_type: order_price, order_type = ocean_order_price(packages) diff --git a/test.ipynb b/test.ipynb index 2ad2cad..126dbc8 100644 --- a/test.ipynb +++ b/test.ipynb @@ -259646,7 +259646,7 @@ }, { "cell_type": "code", - "execution_count": 6, + "execution_count": null, "metadata": {}, "outputs": [ { @@ -259966,8 +259966,7 @@ ], "source": [ "import pandas as pd\n", - "df=pd.read_clipboard()\n", - "df" + "df=pd.read_clipboard()" ] }, { @@ -259975,7 +259974,23 @@ "execution_count": null, "metadata": {}, "outputs": [], - "source": [] + "source": [ + "\n", + "D, E, F = [], [], []\n", + "temp_sum = 0\n", + "\n", + "for i in range(len(df)):\n", + " temp_sum += df.loc[i, '未售出']\n", + " if df.loc[i, '已售出'] != 0:\n", + " D.append(df.loc[i, '价格'])\n", + " E.append(temp_sum)\n", + " F.append(df.loc[i, '已售出'])\n", + " temp_sum = 0 # 重置\n", + "\n", + "# 结果\n", + "result = pd.DataFrame({'D': D, 'E': E, 'F': 
F})\n", + "result.to_clipboard(index=False)" + ] } ], "metadata": { diff --git a/utils/logisticsBill.py b/utils/logisticsBill.py index 0479166..cfb91f0 100644 --- a/utils/logisticsBill.py +++ b/utils/logisticsBill.py @@ -30,8 +30,10 @@ class Billing: self.items: List[BillItem] = [] # 存储账单项 self.volume_weight = 0 self.head_per = 0 + self.logistic_type = None self.add_items_from_operator() + def add_item(self, item: BillItem): """添加账单项""" self.items.append(item) @@ -41,7 +43,9 @@ class Billing: if self.packages is not None or self.postcode is not None: self.operator.set_packages_and_postcode(self.packages, self.postcode) # 设置包裹信息 company_name = self.company_name if self.company_name is not None else self.operator.get_min_company() + logistic_type = self.logistic_type if self.logistic_type is not None else self.operator.get_logistic_type(company_name) self.company_name = company_name + self.logistic_type = logistic_type # 获取头程费用 head_detail = self.operator.get_ocean_fee() if self.head_type == 1 else self.operator.get_air_fee() @@ -87,7 +91,7 @@ class Billing: if item.item_type == "尾程" and item.item_detail == "tail_amount": tailfee = item.amount_usd return headfee + tailfee - + def bill_dict(self): """返回账单字典""" result = {}