@@ -61,14 +61,17 @@ def process_folder(path, all_records):
 
             temp_df = df[[1, 5]].rename(columns={1: 'commodity', 5: 'import'})
             temp_df['import'] = pd.to_numeric(temp_df['import'].replace('--', 0), errors='coerce')
+            temp_df['import'] = temp_df['import'] * 10
             import_df = pd.concat([import_df, temp_df])
 
             temp_df = df[[1, 3]].rename(columns={1: 'commodity', 3: 'export'})
             temp_df['export'] = pd.to_numeric(temp_df['export'].replace('--', 0), errors='coerce')
+            temp_df['export'] = temp_df['export'] * 10
             export_df = pd.concat([export_df, temp_df])
 
             temp_df = df[[1, 2]].rename(columns={1: 'commodity', 2: 'total'})
             temp_df['total'] = pd.to_numeric(temp_df['total'].replace('--', 0), errors='coerce')
+            temp_df['total'] = temp_df['total'] * 10
             total_df = pd.concat([total_df, temp_df])
 
             break
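Note: below is a minimal, self-contained sketch of the cleaning and rescaling step this hunk extends, assuming a sheet where column 1 holds the commodity name and column 5 the import value, with '--' marking missing data. The DataFrame contents are invented for illustration, and the factor of 10 simply mirrors the rescaling added above (the exact source unit is not stated in the diff).

    import pandas as pd

    # Two invented rows: one numeric value and one '--' placeholder.
    df = pd.DataFrame({1: ['Mechanical products', 'Textiles'], 5: ['123.4', '--']})

    temp_df = df[[1, 5]].rename(columns={1: 'commodity', 5: 'import'})
    # '--' becomes 0; any other non-numeric value becomes NaN via errors='coerce'.
    temp_df['import'] = pd.to_numeric(temp_df['import'].replace('--', 0), errors='coerce')
    # Rescale by 10, as in the lines added above.
    temp_df['import'] = temp_df['import'] * 10
    print(temp_df)  # 'import' column: 1234.0 and 0.0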
@@ -111,14 +114,11 @@ def save_to_database(import_df, export_df, total_df, year, month, all_records):
 
         sql = (f"INSERT INTO t_yujin_crossborder_prov_commodity_trade "
                f"(crossborder_year, crossborder_year_month, prov_code, prov_name, commodity_code, commodity_name, monthly_total, monthly_export, monthly_import, create_time, commodity_source) VALUES "
-               f"('{year}', '{year_month}', '320000', '江苏省', '{commodity_code}', '{category_name}', {format_sql_value(monthly_total)}, {format_sql_value(monthly_export)}, {format_sql_value(monthly_import)}, now(), 1);")
+               f"('{year}', '{year_month}', '320000', '江苏省', '{commodity_code}', '{category_name}', {monthly_total}, {monthly_export}, {monthly_import}, now(), 1);")
         sql_arr.append(sql)
 
         processed_commodities.add(commodity_code)
-    # except Exception as e:
-    #     print(f"{year_month} 生成SQL时发生异常: {str(e)}")
 
-    # 原有SQL执行逻辑
     print(f"√ {year_month} 成功生成SQL文件 size {len(sql_arr)} ")
     base_mysql.bulk_insert(sql_arr)
     print(f"√ {year_month} prov_commodity_trade SQL 存表完成!")
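Note: below is a minimal sketch of the single-row INSERT the rewritten f-string above now produces; every sample value is invented for illustration, and format_sql_value is the helper the removed line used to wrap the numeric fields. One side effect of interpolating the numbers directly is that a NaN left over from pd.to_numeric(..., errors='coerce') would be rendered literally as nan in the SQL text, something the old wrapper presumably handled.

    year, year_month = '2024', '202403'
    commodity_code, category_name = '84', 'Mechanical and electrical products'
    monthly_total, monthly_export, monthly_import = 1234.0, 800.0, 434.0

    sql = (f"INSERT INTO t_yujin_crossborder_prov_commodity_trade "
           f"(crossborder_year, crossborder_year_month, prov_code, prov_name, commodity_code, commodity_name, monthly_total, monthly_export, monthly_import, create_time, commodity_source) VALUES "
           f"('{year}', '{year_month}', '320000', '江苏省', '{commodity_code}', '{category_name}', {monthly_total}, {monthly_export}, {monthly_import}, now(), 1);")
    # Prints the full statement on one line, ending with:
    # ... '84', 'Mechanical and electrical products', 1234.0, 800.0, 434.0, now(), 1);
    print(sql)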