Update 道路通行时间预测.ipynb

pull/2/head
benjas 5 years ago
parent 9a26d7194c
commit 31d60c852e

@@ -122,7 +122,7 @@
}
],
"source": [
"df = pd.read_csv('new_gy_contest_traveltime_training_data_second.txt',delimiter=';',dtype={'link_ID':object})\n",
"df = pd.read_csv('data/new_gy_contest_traveltime_training_data_second.txt',delimiter=';',dtype={'link_ID':object})\n",
"df.head()"
]
},
@@ -229,7 +229,7 @@
}
],
"source": [
"link_df = pd.read_csv('gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
"link_df = pd.read_csv('data/gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
"link_df.head()"
]
},
@@ -328,7 +328,7 @@
}
],
"source": [
"link_tops = pd.read_csv('gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})\n",
"link_tops = pd.read_csv('data/gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})\n",
"link_tops.head()"
]
},
@@ -702,7 +702,7 @@
"outputs": [],
"source": [
"#保存处理结果\n",
"df.to_csv('raw_data.txt',header=True,index=None,sep=';',mode='w')"
"df.to_csv('data/raw_data.txt',header=True,index=None,sep=';',mode='w')"
]
},
{
@@ -799,7 +799,7 @@
}
],
"source": [
"df = pd.read_csv('raw_data.txt',delimiter=';',parse_dates=['time_interval_begin'],dtype={'link_ID':object})\n",
"df = pd.read_csv('data/raw_data.txt',delimiter=';',parse_dates=['time_interval_begin'],dtype={'link_ID':object})\n",
"df.head()"
]
},
@@ -1213,7 +1213,7 @@
"outputs": [],
"source": [
"#保存中间结果\n",
"df2.to_csv('pre_trainning.txt',header=True,index=None,sep=';',mode='w')"
"df2.to_csv('data/pre_trainning.txt',header=True,index=None,sep=';',mode='w')"
]
},
{
@@ -2156,8 +2156,8 @@
"metadata": {},
"outputs": [],
"source": [
"link_infos = pd.read_csv('gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
"link_tops = pd.read_csv('gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})"
"link_infos = pd.read_csv('data/gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
"link_tops = pd.read_csv('data/gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})"
]
},
{
@@ -3955,7 +3955,7 @@
],
"source": [
"print(df[['travel_time','prediction', 'travel_time2']].describe())\n",
"df[['link_ID','date','time_interval_begin','travel_time','imputation1']].to_csv('com_trainning.txt',\n",
"df[['link_ID','date','time_interval_begin','travel_time','imputation1']].to_csv('data/com_trainning.txt',\n",
" header=True,\n",
" index=None,\n",
" sep=';',mode='w')"
@@ -3974,7 +3974,7 @@
"metadata": {},
"outputs": [],
"source": [
"df = pd.read_csv('com_trainning.txt',\n",
"df = pd.read_csv('data/com_trainning.txt',\n",
" delimiter=';',\n",
" parse_dates=['time_interval_begin'],\n",
" dtype={'link_ID':object})"
@@ -4663,8 +4663,8 @@
"metadata": {},
"outputs": [],
"source": [
"link_infos = pd.read_csv('gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
"link_tops = pd.read_csv('gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})"
"link_infos = pd.read_csv('data/gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
"link_tops = pd.read_csv('data/gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})"
]
},
{
@@ -5733,7 +5733,7 @@
"metadata": {},
"outputs": [],
"source": [
"df2.to_csv('trainning.txt',header=True,index=None,sep=';',mode='w')"
"df2.to_csv('data/trainning.txt',header=True,index=None,sep=';',mode='w')"
]
},
{
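
Taken together, the hunks apply one uniform change: every pd.read_csv / to_csv path gains a data/ prefix, while the other arguments (delimiter=';', dtype={'link_ID': object}, and parse_dates where used) stay exactly as before. A minimal sketch of that pattern follows, assuming the competition files now live under a data/ directory as this commit expects; the DATA_DIR constant is a hypothetical convenience and is not defined anywhere in the notebook.

    from pathlib import Path

    import pandas as pd

    # Hypothetical helper: one place to change if the data directory moves again.
    DATA_DIR = Path('data')

    # link_ID is read as a string (object) rather than being parsed as a number,
    # mirroring the dtype argument used throughout the notebook's cells.
    df = pd.read_csv(DATA_DIR / 'new_gy_contest_traveltime_training_data_second.txt',
                     delimiter=';',
                     dtype={'link_ID': object})

    # Intermediate results go back under data/ with the same ';' separator ...
    df.to_csv(DATA_DIR / 'raw_data.txt', header=True, index=None, sep=';', mode='w')

    # ... so later cells can reload them with identical settings, plus date parsing.
    df = pd.read_csv(DATA_DIR / 'raw_data.txt',
                     delimiter=';',
                     parse_dates=['time_interval_begin'],
                     dtype={'link_ID': object})

Centralising the prefix this way means a future relocation of the files touches a single line instead of every read_csv / to_csv call that this commit had to edit cell by cell.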
