From 31d60c852e0b03065c72fafadfc778947d299ae4 Mon Sep 17 00:00:00 2001
From: benjas <909336740@qq.com>
Date: Thu, 17 Dec 2020 09:23:05 +0800
Subject: [PATCH] Update 道路通行时间预测.ipynb
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../道路通行时间预测.ipynb | 26 +++++++++++++-------------
 1 file changed, 13 insertions(+), 13 deletions(-)

diff --git a/机器学习竞赛实战_优胜解决方案/智慧城市-道路通行时间预测/道路通行时间预测.ipynb b/机器学习竞赛实战_优胜解决方案/智慧城市-道路通行时间预测/道路通行时间预测.ipynb
index b3e8c37..c61e617 100644
--- a/机器学习竞赛实战_优胜解决方案/智慧城市-道路通行时间预测/道路通行时间预测.ipynb
+++ b/机器学习竞赛实战_优胜解决方案/智慧城市-道路通行时间预测/道路通行时间预测.ipynb
@@ -122,7 +122,7 @@
     }
    ],
    "source": [
-    "df = pd.read_csv('new_gy_contest_traveltime_training_data_second.txt',delimiter=';',dtype={'link_ID':object})\n",
+    "df = pd.read_csv('data/new_gy_contest_traveltime_training_data_second.txt',delimiter=';',dtype={'link_ID':object})\n",
     "df.head()"
    ]
   },
@@ -229,7 +229,7 @@
     }
    ],
    "source": [
-    "link_df = pd.read_csv('gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
+    "link_df = pd.read_csv('data/gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
     "link_df.head()"
    ]
   },
@@ -328,7 +328,7 @@
     }
    ],
    "source": [
-    "link_tops = pd.read_csv('gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})\n",
+    "link_tops = pd.read_csv('data/gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})\n",
     "link_tops.head()"
    ]
   },
@@ -702,7 +702,7 @@
    "outputs": [],
    "source": [
     "#保存处理结果\n",
-    "df.to_csv('raw_data.txt',header=True,index=None,sep=';',mode='w')"
+    "df.to_csv('data/raw_data.txt',header=True,index=None,sep=';',mode='w')"
    ]
   },
   {
@@ -799,7 +799,7 @@
     }
    ],
    "source": [
-    "df = pd.read_csv('raw_data.txt',delimiter=';',parse_dates=['time_interval_begin'],dtype={'link_ID':object})\n",
+    "df = pd.read_csv('data/raw_data.txt',delimiter=';',parse_dates=['time_interval_begin'],dtype={'link_ID':object})\n",
     "df.head()"
    ]
   },
@@ -1213,7 +1213,7 @@
    "outputs": [],
    "source": [
     "#保存中间结果\n",
-    "df2.to_csv('pre_trainning.txt',header=True,index=None,sep=';',mode='w')"
+    "df2.to_csv('data/pre_trainning.txt',header=True,index=None,sep=';',mode='w')"
    ]
   },
   {
@@ -2156,8 +2156,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "link_infos = pd.read_csv('gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
-    "link_tops = pd.read_csv('gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})"
+    "link_infos = pd.read_csv('data/gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
+    "link_tops = pd.read_csv('data/gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})"
    ]
   },
   {
@@ -3955,7 +3955,7 @@
    ],
    "source": [
     "print(df[['travel_time','prediction', 'travel_time2']].describe())\n",
-    "df[['link_ID','date','time_interval_begin','travel_time','imputation1']].to_csv('com_trainning.txt',\n",
+    "df[['link_ID','date','time_interval_begin','travel_time','imputation1']].to_csv('data/com_trainning.txt',\n",
     "                                                                                 header=True,\n",
     "                                                                                 index=None,\n",
     "                                                                                 sep=';',mode='w')"
@@ -3974,7 +3974,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "df = pd.read_csv('com_trainning.txt',\n",
+    "df = pd.read_csv('data/com_trainning.txt',\n",
     "                 delimiter=';',\n",
     "                 parse_dates=['time_interval_begin'],\n",
     "                 dtype={'link_ID':object})"
@@ -4663,8 +4663,8 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "link_infos = pd.read_csv('gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
-    "link_tops = pd.read_csv('gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})"
+    "link_infos = pd.read_csv('data/gy_contest_link_info.txt',delimiter=';',dtype={'link_ID':object})\n",
+    "link_tops = pd.read_csv('data/gy_contest_link_top_update.txt',delimiter=',',dtype={'link_ID':object})"
    ]
   },
   {
@@ -5733,7 +5733,7 @@
    "metadata": {},
    "outputs": [],
    "source": [
-    "df2.to_csv('trainning.txt',header=True,index=None,sep=';',mode='w')"
+    "df2.to_csv('data/trainning.txt',header=True,index=None,sep=';',mode='w')"
    ]
   },
   {