I want to save all rows of the 4 columns in dft to the HighGo (瀚高) database; please help me write the code.
It should follow the same pattern I use for saving my other data; the reference code is as follows:
# Database operations
conn = create_connection()  # shared database connection helper
cursor = conn.cursor()
db_config = get_db_config()
dbtype = db_config['dbtype']
try:
    if batch_data:
        sql = ''  # initialize the statement as an empty string
        if dbtype == 'mysql':
            sql = """
                INSERT INTO station_wrf_info
                    (station_id, station_name, latitude, longitude, height, temperature,
                     relative_humidity, wind_speed, wind_direction, vertical_velocity_pressure, pressure, data_dt)
                VALUES
                    (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                ON DUPLICATE KEY UPDATE
                    station_name = VALUES(station_name),
                    latitude = VALUES(latitude),
                    longitude = VALUES(longitude),
                    height = VALUES(height),
                    temperature = VALUES(temperature),
                    relative_humidity = VALUES(relative_humidity),
                    wind_speed = VALUES(wind_speed),
                    wind_direction = VALUES(wind_direction),
                    vertical_velocity_pressure = VALUES(vertical_velocity_pressure),
                    pressure = VALUES(pressure),
                    data_dt = VALUES(data_dt)
            """
        elif dbtype == 'highgo':
            sql = """
                INSERT INTO station_wrf_info
                    (station_id, station_name, latitude, longitude, height, temperature,
                     relative_humidity, wind_speed, wind_direction, vertical_velocity_pressure, pressure, data_dt)
                VALUES
                    (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)
                ON CONFLICT (station_id, data_dt, height) DO NOTHING
            """
        # Insert in batches
        batch_size = 1000
        for i in range(0, len(batch_data), batch_size):
            cursor.executemany(sql, batch_data[i:i + batch_size])
        conn.commit()
        logger.info(f"Successfully processed {len(batch_data)} rows (inserted or updated).")
except Exception as e:
    logger.error(f"Error while upserting into the database: {e}")
    conn.rollback()
finally:
    cursor.close()
    conn.close()
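Below is a minimal sketch of how the same pattern could be adapted to write every row of dft's four columns into HighGo. The table name dft_info, the four column names, and the conflict target in the ON CONFLICT clause are placeholders I made up, since the question does not name them; create_connection, get_db_config, and logger are assumed to be the same shared helpers used in the reference code, and dft is assumed to be a pandas DataFrame.

import pandas as pd  # assumption: dft is a pandas DataFrame

# Hypothetical names: replace with the real target table and the real four columns of dft.
TABLE_NAME = 'dft_info'
COLUMNS = ['col1', 'col2', 'col3', 'col4']

def save_dft_to_highgo(dft: pd.DataFrame):
    # Convert the four DataFrame columns to a list of tuples for executemany;
    # NaN values are mapped to None so they are stored as SQL NULL.
    batch_data = [
        tuple(None if pd.isna(v) else v for v in row)
        for row in dft[COLUMNS].itertuples(index=False, name=None)
    ]
    if not batch_data:
        return

    conn = create_connection()  # shared database connection helper
    cursor = conn.cursor()
    db_config = get_db_config()
    dbtype = db_config['dbtype']
    try:
        if dbtype == 'highgo':
            # HighGo accepts the PostgreSQL-style ON CONFLICT clause used in the
            # reference code; the conflict target (col1) is only an assumption and
            # must match a unique constraint on the real table.
            sql = f"""
                INSERT INTO {TABLE_NAME} ({', '.join(COLUMNS)})
                VALUES (%s, %s, %s, %s)
                ON CONFLICT (col1) DO NOTHING
            """
        else:  # mysql branch, mirroring the reference code
            sql = f"""
                INSERT INTO {TABLE_NAME} ({', '.join(COLUMNS)})
                VALUES (%s, %s, %s, %s)
                ON DUPLICATE KEY UPDATE
                    col2 = VALUES(col2),
                    col3 = VALUES(col3),
                    col4 = VALUES(col4)
            """
        # Insert in batches of 1000, same as the reference code
        batch_size = 1000
        for i in range(0, len(batch_data), batch_size):
            cursor.executemany(sql, batch_data[i:i + batch_size])
        conn.commit()
        logger.info(f"Successfully saved {len(batch_data)} rows from dft.")
    except Exception as e:
        logger.error(f"Error while saving dft to the database: {e}")
        conn.rollback()
    finally:
        cursor.close()
        conn.close()

Calling save_dft_to_highgo(dft) after dft has been built would then persist all of its rows in one batched pass, reusing the same connection, batching, and rollback handling as the station_wrf_info code above.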