Created
May 3, 2026 13:03
-
-
Save jaiswalakshay508-maker/dedfec63ee213f0e363ba2872485709f to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| { | |
| "cells": [ | |
| { | |
| "cell_type": "code", | |
| "id": "initial_id", | |
| "metadata": { | |
| "collapsed": true, | |
| "ExecuteTime": { | |
| "end_time": "2025-12-14T10:26:10.076853Z", | |
| "start_time": "2025-12-14T10:26:02.169768Z" | |
| } | |
| }, | |
| "source": "import pandas as pd", | |
| "outputs": [], | |
| "execution_count": 1 | |
| }, | |
| { | |
| "metadata": { | |
| "ExecuteTime": { | |
| "end_time": "2025-12-05T12:57:25.372074Z", | |
| "start_time": "2025-12-05T12:57:25.281064Z" | |
| } | |
| }, | |
| "cell_type": "code", | |
| "source": [ | |
| "df=pd.read_csv('expense.csv')\n", | |
| "df" | |
| ], | |
| "id": "dbe5c9ccbddf7bdc", | |
| "outputs": [ | |
| { | |
| "data": { | |
| "text/plain": [ | |
| " date amount category\n", | |
| "0 03/01/2024 520 Groceries\n", | |
| "1 07/01/2024 150 Transport\n", | |
| "2 15/01/2024 980 Electronics\n", | |
| "3 28/01/2024 220 Dining\n", | |
| "4 05/02/2024 430 Rent\n", | |
| "5 11/02/2024 85 Groceries\n", | |
| "6 19/02/2024 60 Snacks\n", | |
| "7 03/03/2024 255 Medical\n", | |
| "8 18/03/2024 145 Transport\n", | |
| "9 25/03/2024 600 Groceries\n", | |
| "10 04/04/2024 210 Utilities\n", | |
| "11 22/04/2024 140 Snacks\n", | |
| "12 01/05/2024 700 Rent\n", | |
| "13 13/05/2024 475 Groceries\n", | |
| "14 29/05/2024 310 Dining\n", | |
| "15 08/06/2024 260 Transport\n", | |
| "16 21/06/2024 120 Snacks\n", | |
| "17 30/06/2024 150 Entertainment\n", | |
| "18 07/07/2024 510 Groceries\n", | |
| "19 19/07/2024 210 Utilities\n", | |
| "20 04/08/2024 130 Transport\n", | |
| "21 26/08/2024 880 Electronics\n", | |
| "22 06/09/2024 900 Rent\n", | |
| "23 14/09/2024 345 Groceries\n", | |
| "24 25/09/2024 90 Snacks\n", | |
| "25 03/10/2024 240 Medical\n", | |
| "26 17/10/2024 190 Transport\n", | |
| "27 29/10/2024 160 Dining\n", | |
| "28 05/11/2024 520 Groceries\n", | |
| "29 22/11/2024 210 Utilities\n", | |
| "30 08/12/2024 150 Snacks\n", | |
| "31 20/12/2024 105 Transport\n", | |
| "32 04/01/2025 540 Groceries\n", | |
| "33 15/01/2025 985 Electronics\n", | |
| "34 28/01/2025 260 Dining\n", | |
| "35 10/02/2025 900 Rent\n", | |
| "36 19/02/2025 180 Transport\n", | |
| "37 27/02/2025 115 Snacks\n", | |
| "38 09/03/2025 620 Groceries\n", | |
| "39 22/03/2025 145 Transport\n", | |
| "40 02/04/2025 210 Utilities\n", | |
| "41 26/04/2025 345 Medical\n", | |
| "42 03/05/2025 705 Rent\n", | |
| "43 19/05/2025 320 Groceries\n", | |
| "44 01/06/2025 155 Snacks\n", | |
| "45 18/06/2025 245 Transport\n", | |
| "46 05/07/2025 520 Groceries\n", | |
| "47 23/07/2025 210 Utilities\n", | |
| "48 09/08/2025 130 Entertainment\n", | |
| "49 27/08/2025 890 Electronics\n", | |
| "50 07/09/2025 950 Rent\n", | |
| "51 18/09/2025 310 Groceries\n", | |
| "52 29/09/2025 95 Snacks\n", | |
| "53 04/10/2025 235 Medical\n", | |
| "54 21/10/2025 190 Transport\n", | |
| "55 01/11/2025 540 Groceries\n", | |
| "56 19/11/2025 210 Dining\n", | |
| "57 08/12/2025 160 Snacks\n", | |
| "58 22/12/2025 110 Transport" | |
| ], | |
| "text/html": [ | |
| "<div>\n", | |
| "<style scoped>\n", | |
| " .dataframe tbody tr th:only-of-type {\n", | |
| " vertical-align: middle;\n", | |
| " }\n", | |
| "\n", | |
| " .dataframe tbody tr th {\n", | |
| " vertical-align: top;\n", | |
| " }\n", | |
| "\n", | |
| " .dataframe thead th {\n", | |
| " text-align: right;\n", | |
| " }\n", | |
| "</style>\n", | |
| "<table border=\"1\" class=\"dataframe\">\n", | |
| " <thead>\n", | |
| " <tr style=\"text-align: right;\">\n", | |
| " <th></th>\n", | |
| " <th>date</th>\n", | |
| " <th>amount</th>\n", | |
| " <th>category</th>\n", | |
| " </tr>\n", | |
| " </thead>\n", | |
| " <tbody>\n", | |
| " <tr>\n", | |
| " <th>0</th>\n", | |
| " <td>03/01/2024</td>\n", | |
| " <td>520</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>1</th>\n", | |
| " <td>07/01/2024</td>\n", | |
| " <td>150</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>2</th>\n", | |
| " <td>15/01/2024</td>\n", | |
| " <td>980</td>\n", | |
| " <td>Electronics</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>3</th>\n", | |
| " <td>28/01/2024</td>\n", | |
| " <td>220</td>\n", | |
| " <td>Dining</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>4</th>\n", | |
| " <td>05/02/2024</td>\n", | |
| " <td>430</td>\n", | |
| " <td>Rent</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>5</th>\n", | |
| " <td>11/02/2024</td>\n", | |
| " <td>85</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>6</th>\n", | |
| " <td>19/02/2024</td>\n", | |
| " <td>60</td>\n", | |
| " <td>Snacks</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>7</th>\n", | |
| " <td>03/03/2024</td>\n", | |
| " <td>255</td>\n", | |
| " <td>Medical</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>8</th>\n", | |
| " <td>18/03/2024</td>\n", | |
| " <td>145</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>9</th>\n", | |
| " <td>25/03/2024</td>\n", | |
| " <td>600</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>10</th>\n", | |
| " <td>04/04/2024</td>\n", | |
| " <td>210</td>\n", | |
| " <td>Utilities</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>11</th>\n", | |
| " <td>22/04/2024</td>\n", | |
| " <td>140</td>\n", | |
| " <td>Snacks</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>12</th>\n", | |
| " <td>01/05/2024</td>\n", | |
| " <td>700</td>\n", | |
| " <td>Rent</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>13</th>\n", | |
| " <td>13/05/2024</td>\n", | |
| " <td>475</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>14</th>\n", | |
| " <td>29/05/2024</td>\n", | |
| " <td>310</td>\n", | |
| " <td>Dining</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>15</th>\n", | |
| " <td>08/06/2024</td>\n", | |
| " <td>260</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>16</th>\n", | |
| " <td>21/06/2024</td>\n", | |
| " <td>120</td>\n", | |
| " <td>Snacks</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>17</th>\n", | |
| " <td>30/06/2024</td>\n", | |
| " <td>150</td>\n", | |
| " <td>Entertainment</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>18</th>\n", | |
| " <td>07/07/2024</td>\n", | |
| " <td>510</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>19</th>\n", | |
| " <td>19/07/2024</td>\n", | |
| " <td>210</td>\n", | |
| " <td>Utilities</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>20</th>\n", | |
| " <td>04/08/2024</td>\n", | |
| " <td>130</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>21</th>\n", | |
| " <td>26/08/2024</td>\n", | |
| " <td>880</td>\n", | |
| " <td>Electronics</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>22</th>\n", | |
| " <td>06/09/2024</td>\n", | |
| " <td>900</td>\n", | |
| " <td>Rent</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>23</th>\n", | |
| " <td>14/09/2024</td>\n", | |
| " <td>345</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>24</th>\n", | |
| " <td>25/09/2024</td>\n", | |
| " <td>90</td>\n", | |
| " <td>Snacks</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>25</th>\n", | |
| " <td>03/10/2024</td>\n", | |
| " <td>240</td>\n", | |
| " <td>Medical</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>26</th>\n", | |
| " <td>17/10/2024</td>\n", | |
| " <td>190</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>27</th>\n", | |
| " <td>29/10/2024</td>\n", | |
| " <td>160</td>\n", | |
| " <td>Dining</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>28</th>\n", | |
| " <td>05/11/2024</td>\n", | |
| " <td>520</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>29</th>\n", | |
| " <td>22/11/2024</td>\n", | |
| " <td>210</td>\n", | |
| " <td>Utilities</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>30</th>\n", | |
| " <td>08/12/2024</td>\n", | |
| " <td>150</td>\n", | |
| " <td>Snacks</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>31</th>\n", | |
| " <td>20/12/2024</td>\n", | |
| " <td>105</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>32</th>\n", | |
| " <td>04/01/2025</td>\n", | |
| " <td>540</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>33</th>\n", | |
| " <td>15/01/2025</td>\n", | |
| " <td>985</td>\n", | |
| " <td>Electronics</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>34</th>\n", | |
| " <td>28/01/2025</td>\n", | |
| " <td>260</td>\n", | |
| " <td>Dining</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>35</th>\n", | |
| " <td>10/02/2025</td>\n", | |
| " <td>900</td>\n", | |
| " <td>Rent</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>36</th>\n", | |
| " <td>19/02/2025</td>\n", | |
| " <td>180</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>37</th>\n", | |
| " <td>27/02/2025</td>\n", | |
| " <td>115</td>\n", | |
| " <td>Snacks</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>38</th>\n", | |
| " <td>09/03/2025</td>\n", | |
| " <td>620</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>39</th>\n", | |
| " <td>22/03/2025</td>\n", | |
| " <td>145</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>40</th>\n", | |
| " <td>02/04/2025</td>\n", | |
| " <td>210</td>\n", | |
| " <td>Utilities</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>41</th>\n", | |
| " <td>26/04/2025</td>\n", | |
| " <td>345</td>\n", | |
| " <td>Medical</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>42</th>\n", | |
| " <td>03/05/2025</td>\n", | |
| " <td>705</td>\n", | |
| " <td>Rent</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>43</th>\n", | |
| " <td>19/05/2025</td>\n", | |
| " <td>320</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>44</th>\n", | |
| " <td>01/06/2025</td>\n", | |
| " <td>155</td>\n", | |
| " <td>Snacks</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>45</th>\n", | |
| " <td>18/06/2025</td>\n", | |
| " <td>245</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>46</th>\n", | |
| " <td>05/07/2025</td>\n", | |
| " <td>520</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>47</th>\n", | |
| " <td>23/07/2025</td>\n", | |
| " <td>210</td>\n", | |
| " <td>Utilities</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>48</th>\n", | |
| " <td>09/08/2025</td>\n", | |
| " <td>130</td>\n", | |
| " <td>Entertainment</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>49</th>\n", | |
| " <td>27/08/2025</td>\n", | |
| " <td>890</td>\n", | |
| " <td>Electronics</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>50</th>\n", | |
| " <td>07/09/2025</td>\n", | |
| " <td>950</td>\n", | |
| " <td>Rent</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>51</th>\n", | |
| " <td>18/09/2025</td>\n", | |
| " <td>310</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>52</th>\n", | |
| " <td>29/09/2025</td>\n", | |
| " <td>95</td>\n", | |
| " <td>Snacks</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>53</th>\n", | |
| " <td>04/10/2025</td>\n", | |
| " <td>235</td>\n", | |
| " <td>Medical</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>54</th>\n", | |
| " <td>21/10/2025</td>\n", | |
| " <td>190</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>55</th>\n", | |
| " <td>01/11/2025</td>\n", | |
| " <td>540</td>\n", | |
| " <td>Groceries</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>56</th>\n", | |
| " <td>19/11/2025</td>\n", | |
| " <td>210</td>\n", | |
| " <td>Dining</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>57</th>\n", | |
| " <td>08/12/2025</td>\n", | |
| " <td>160</td>\n", | |
| " <td>Snacks</td>\n", | |
| " </tr>\n", | |
| " <tr>\n", | |
| " <th>58</th>\n", | |
| " <td>22/12/2025</td>\n", | |
| " <td>110</td>\n", | |
| " <td>Transport</td>\n", | |
| " </tr>\n", | |
| " </tbody>\n", | |
| "</table>\n", | |
| "</div>" | |
| ] | |
| }, | |
| "execution_count": 3, | |
| "metadata": {}, | |
| "output_type": "execute_result" | |
| } | |
| ], | |
| "execution_count": 3 | |
| }, | |
| { | |
| "metadata": { | |
| "ExecuteTime": { | |
| "end_time": "2025-12-05T12:59:43.180972Z", | |
| "start_time": "2025-12-05T12:59:42.901985Z" | |
| } | |
| }, | |
| "cell_type": "code", | |
| "source": [ | |
| "df['date']=pd.to_datetime(df['date'],format='%d/%m/%Y')\n", | |
| "df.head()" | |
| ], | |
| "id": "72ff88ed88ec2845", | |
| "outputs": [ | |
| { | |
| "ename": "ValueError", | |
| "evalue": "time data \"03/01/2024\" doesn't match format \"%d/%m/$Y\", at position 0. You might want to try:\n - passing `format` if your strings have a consistent format;\n - passing `format='ISO8601'` if your strings are all ISO8601 but not necessarily in exactly the same format;\n - passing `format='mixed'`, and the format will be inferred for each element individually. You might want to use `dayfirst` alongside this.", | |
| "output_type": "error", | |
| "traceback": [ | |
| "\u001B[31m---------------------------------------------------------------------------\u001B[39m", | |
| "\u001B[31mValueError\u001B[39m Traceback (most recent call last)", | |
| "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[5]\u001B[39m\u001B[32m, line 1\u001B[39m\n\u001B[32m----> \u001B[39m\u001B[32m1\u001B[39m df[\u001B[33m'\u001B[39m\u001B[33mdate\u001B[39m\u001B[33m'\u001B[39m]=\u001B[43mpd\u001B[49m\u001B[43m.\u001B[49m\u001B[43mto_datetime\u001B[49m\u001B[43m(\u001B[49m\u001B[43mdf\u001B[49m\u001B[43m[\u001B[49m\u001B[33;43m'\u001B[39;49m\u001B[33;43mdate\u001B[39;49m\u001B[33;43m'\u001B[39;49m\u001B[43m]\u001B[49m\u001B[43m,\u001B[49m\u001B[38;5;28;43mformat\u001B[39;49m\u001B[43m=\u001B[49m\u001B[33;43m'\u001B[39;49m\u001B[38;5;132;43;01m%d\u001B[39;49;00m\u001B[33;43m/\u001B[39;49m\u001B[33;43m%\u001B[39;49m\u001B[33;43mm/$Y\u001B[39;49m\u001B[33;43m'\u001B[39;49m\u001B[43m)\u001B[49m\n\u001B[32m 2\u001B[39m df.head()\n", | |
| "\u001B[36mFile \u001B[39m\u001B[32m~\\PycharmProjects\\JupyterProject\\.venv\\Lib\\site-packages\\pandas\\core\\tools\\datetimes.py:1072\u001B[39m, in \u001B[36mto_datetime\u001B[39m\u001B[34m(arg, errors, dayfirst, yearfirst, utc, format, exact, unit, infer_datetime_format, origin, cache)\u001B[39m\n\u001B[32m 1070\u001B[39m result = arg.map(cache_array)\n\u001B[32m 1071\u001B[39m \u001B[38;5;28;01melse\u001B[39;00m:\n\u001B[32m-> \u001B[39m\u001B[32m1072\u001B[39m values = \u001B[43mconvert_listlike\u001B[49m\u001B[43m(\u001B[49m\u001B[43marg\u001B[49m\u001B[43m.\u001B[49m\u001B[43m_values\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;28;43mformat\u001B[39;49m\u001B[43m)\u001B[49m\n\u001B[32m 1073\u001B[39m result = arg._constructor(values, index=arg.index, name=arg.name)\n\u001B[32m 1074\u001B[39m \u001B[38;5;28;01melif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(arg, (ABCDataFrame, abc.MutableMapping)):\n", | |
| "\u001B[36mFile \u001B[39m\u001B[32m~\\PycharmProjects\\JupyterProject\\.venv\\Lib\\site-packages\\pandas\\core\\tools\\datetimes.py:435\u001B[39m, in \u001B[36m_convert_listlike_datetimes\u001B[39m\u001B[34m(arg, format, name, utc, unit, errors, dayfirst, yearfirst, exact)\u001B[39m\n\u001B[32m 433\u001B[39m \u001B[38;5;66;03m# `format` could be inferred, or user didn't ask for mixed-format parsing.\u001B[39;00m\n\u001B[32m 434\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m \u001B[38;5;28mformat\u001B[39m \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m \u001B[38;5;129;01mand\u001B[39;00m \u001B[38;5;28mformat\u001B[39m != \u001B[33m\"\u001B[39m\u001B[33mmixed\u001B[39m\u001B[33m\"\u001B[39m:\n\u001B[32m--> \u001B[39m\u001B[32m435\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[43m_array_strptime_with_fallback\u001B[49m\u001B[43m(\u001B[49m\u001B[43marg\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mname\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mutc\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[38;5;28;43mformat\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mexact\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43merrors\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 437\u001B[39m result, tz_parsed = objects_to_datetime64(\n\u001B[32m 438\u001B[39m arg,\n\u001B[32m 439\u001B[39m dayfirst=dayfirst,\n\u001B[32m (...)\u001B[39m\u001B[32m 443\u001B[39m allow_object=\u001B[38;5;28;01mTrue\u001B[39;00m,\n\u001B[32m 444\u001B[39m )\n\u001B[32m 446\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m tz_parsed \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 447\u001B[39m \u001B[38;5;66;03m# We can take a shortcut since the datetime64 numpy array\u001B[39;00m\n\u001B[32m 448\u001B[39m \u001B[38;5;66;03m# is in UTC\u001B[39;00m\n", | |
| "\u001B[36mFile \u001B[39m\u001B[32m~\\PycharmProjects\\JupyterProject\\.venv\\Lib\\site-packages\\pandas\\core\\tools\\datetimes.py:469\u001B[39m, in \u001B[36m_array_strptime_with_fallback\u001B[39m\u001B[34m(arg, name, utc, fmt, exact, errors)\u001B[39m\n\u001B[32m 458\u001B[39m \u001B[38;5;28;01mdef\u001B[39;00m\u001B[38;5;250m \u001B[39m\u001B[34m_array_strptime_with_fallback\u001B[39m(\n\u001B[32m 459\u001B[39m arg,\n\u001B[32m 460\u001B[39m name,\n\u001B[32m (...)\u001B[39m\u001B[32m 464\u001B[39m errors: \u001B[38;5;28mstr\u001B[39m,\n\u001B[32m 465\u001B[39m ) -> Index:\n\u001B[32m 466\u001B[39m \u001B[38;5;250m \u001B[39m\u001B[33;03m\"\"\"\u001B[39;00m\n\u001B[32m 467\u001B[39m \u001B[33;03m Call array_strptime, with fallback behavior depending on 'errors'.\u001B[39;00m\n\u001B[32m 468\u001B[39m \u001B[33;03m \"\"\"\u001B[39;00m\n\u001B[32m--> \u001B[39m\u001B[32m469\u001B[39m result, tz_out = \u001B[43marray_strptime\u001B[49m\u001B[43m(\u001B[49m\u001B[43marg\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mfmt\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mexact\u001B[49m\u001B[43m=\u001B[49m\u001B[43mexact\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43merrors\u001B[49m\u001B[43m=\u001B[49m\u001B[43merrors\u001B[49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mutc\u001B[49m\u001B[43m=\u001B[49m\u001B[43mutc\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 470\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m tz_out \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 471\u001B[39m unit = np.datetime_data(result.dtype)[\u001B[32m0\u001B[39m]\n", | |
| "\u001B[36mFile \u001B[39m\u001B[32mpandas/_libs/tslibs/strptime.pyx:501\u001B[39m, in \u001B[36mpandas._libs.tslibs.strptime.array_strptime\u001B[39m\u001B[34m()\u001B[39m\n", | |
| "\u001B[36mFile \u001B[39m\u001B[32mpandas/_libs/tslibs/strptime.pyx:451\u001B[39m, in \u001B[36mpandas._libs.tslibs.strptime.array_strptime\u001B[39m\u001B[34m()\u001B[39m\n", | |
| "\u001B[36mFile \u001B[39m\u001B[32mpandas/_libs/tslibs/strptime.pyx:583\u001B[39m, in \u001B[36mpandas._libs.tslibs.strptime._parse_with_format\u001B[39m\u001B[34m()\u001B[39m\n", | |
| "\u001B[31mValueError\u001B[39m: time data \"03/01/2024\" doesn't match format \"%d/%m/$Y\", at position 0. You might want to try:\n - passing `format` if your strings have a consistent format;\n - passing `format='ISO8601'` if your strings are all ISO8601 but not necessarily in exactly the same format;\n - passing `format='mixed'`, and the format will be inferred for each element individually. You might want to use `dayfirst` alongside this." | |
| ] | |
| } | |
| ], | |
| "execution_count": 5 | |
| }, | |
| { | |
| "metadata": { | |
| "ExecuteTime": { | |
| "end_time": "2025-12-05T13:04:10.590074Z", | |
| "start_time": "2025-12-05T13:04:09.567992Z" | |
| } | |
| }, | |
| "cell_type": "code", | |
| "source": [ | |
| "df['month']=df['date'].dt.month\n", | |
| "df['year']=df['date'].dt.year\n", | |
| "df['day']=df['date'].dt.day\n", | |
| "df['day_name']=df['date'].dt.day_name()\n", | |
| "df['month_name']=df['date'].dt.month_name()\n", | |
| "df" | |
| ], | |
| "id": "8944883b5cf047a8", | |
| "outputs": [ | |
| { | |
| "ename": "AttributeError", | |
| "evalue": "Can only use .dt accessor with datetimelike values", | |
| "output_type": "error", | |
| "traceback": [ | |
| "\u001B[31m---------------------------------------------------------------------------\u001B[39m", | |
| "\u001B[31mAttributeError\u001B[39m Traceback (most recent call last)", | |
| "\u001B[36mCell\u001B[39m\u001B[36m \u001B[39m\u001B[32mIn[6]\u001B[39m\u001B[32m, line 1\u001B[39m\n\u001B[32m----> \u001B[39m\u001B[32m1\u001B[39m df[\u001B[33m'\u001B[39m\u001B[33mmonth\u001B[39m\u001B[33m'\u001B[39m]=\u001B[43mdf\u001B[49m\u001B[43m[\u001B[49m\u001B[33;43m'\u001B[39;49m\u001B[33;43mdate\u001B[39;49m\u001B[33;43m'\u001B[39;49m\u001B[43m]\u001B[49m\u001B[43m.\u001B[49m\u001B[43mdt\u001B[49m.month\n\u001B[32m 2\u001B[39m df[\u001B[33m'\u001B[39m\u001B[33myear\u001B[39m\u001B[33m'\u001B[39m]=df[\u001B[33m'\u001B[39m\u001B[33mdate\u001B[39m\u001B[33m'\u001B[39m].dt.year\n\u001B[32m 3\u001B[39m df[\u001B[33m'\u001B[39m\u001B[33mday\u001B[39m\u001B[33m'\u001B[39m]=df[\u001B[33m'\u001B[39m\u001B[33mdate\u001B[39m\u001B[33m'\u001B[39m].dt.day\n", | |
| "\u001B[36mFile \u001B[39m\u001B[32m~\\PycharmProjects\\JupyterProject\\.venv\\Lib\\site-packages\\pandas\\core\\generic.py:6321\u001B[39m, in \u001B[36mNDFrame.__getattr__\u001B[39m\u001B[34m(self, name)\u001B[39m\n\u001B[32m 6314\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m (\n\u001B[32m 6315\u001B[39m name \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;129;01min\u001B[39;00m \u001B[38;5;28mself\u001B[39m._internal_names_set\n\u001B[32m 6316\u001B[39m \u001B[38;5;129;01mand\u001B[39;00m name \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;129;01min\u001B[39;00m \u001B[38;5;28mself\u001B[39m._metadata\n\u001B[32m 6317\u001B[39m \u001B[38;5;129;01mand\u001B[39;00m name \u001B[38;5;129;01mnot\u001B[39;00m \u001B[38;5;129;01min\u001B[39;00m \u001B[38;5;28mself\u001B[39m._accessors\n\u001B[32m 6318\u001B[39m \u001B[38;5;129;01mand\u001B[39;00m \u001B[38;5;28mself\u001B[39m._info_axis._can_hold_identifiers_and_holds_name(name)\n\u001B[32m 6319\u001B[39m ):\n\u001B[32m 6320\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m[name]\n\u001B[32m-> \u001B[39m\u001B[32m6321\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28;43mobject\u001B[39;49m\u001B[43m.\u001B[49m\u001B[34;43m__getattribute__\u001B[39;49m\u001B[43m(\u001B[49m\u001B[38;5;28;43mself\u001B[39;49m\u001B[43m,\u001B[49m\u001B[43m \u001B[49m\u001B[43mname\u001B[49m\u001B[43m)\u001B[49m\n", | |
| "\u001B[36mFile \u001B[39m\u001B[32m~\\PycharmProjects\\JupyterProject\\.venv\\Lib\\site-packages\\pandas\\core\\accessor.py:224\u001B[39m, in \u001B[36mCachedAccessor.__get__\u001B[39m\u001B[34m(self, obj, cls)\u001B[39m\n\u001B[32m 221\u001B[39m \u001B[38;5;28;01mif\u001B[39;00m obj \u001B[38;5;129;01mis\u001B[39;00m \u001B[38;5;28;01mNone\u001B[39;00m:\n\u001B[32m 222\u001B[39m \u001B[38;5;66;03m# we're accessing the attribute of the class, i.e., Dataset.geo\u001B[39;00m\n\u001B[32m 223\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m \u001B[38;5;28mself\u001B[39m._accessor\n\u001B[32m--> \u001B[39m\u001B[32m224\u001B[39m accessor_obj = \u001B[38;5;28;43mself\u001B[39;49m\u001B[43m.\u001B[49m\u001B[43m_accessor\u001B[49m\u001B[43m(\u001B[49m\u001B[43mobj\u001B[49m\u001B[43m)\u001B[49m\n\u001B[32m 225\u001B[39m \u001B[38;5;66;03m# Replace the property with the accessor object. Inspired by:\u001B[39;00m\n\u001B[32m 226\u001B[39m \u001B[38;5;66;03m# https://www.pydanny.com/cached-property.html\u001B[39;00m\n\u001B[32m 227\u001B[39m \u001B[38;5;66;03m# We need to use object.__setattr__ because we overwrite __setattr__ on\u001B[39;00m\n\u001B[32m 228\u001B[39m \u001B[38;5;66;03m# NDFrame\u001B[39;00m\n\u001B[32m 229\u001B[39m \u001B[38;5;28mobject\u001B[39m.\u001B[34m__setattr__\u001B[39m(obj, \u001B[38;5;28mself\u001B[39m._name, accessor_obj)\n", | |
| "\u001B[36mFile \u001B[39m\u001B[32m~\\PycharmProjects\\JupyterProject\\.venv\\Lib\\site-packages\\pandas\\core\\indexes\\accessors.py:643\u001B[39m, in \u001B[36mCombinedDatetimelikeProperties.__new__\u001B[39m\u001B[34m(cls, data)\u001B[39m\n\u001B[32m 640\u001B[39m \u001B[38;5;28;01melif\u001B[39;00m \u001B[38;5;28misinstance\u001B[39m(data.dtype, PeriodDtype):\n\u001B[32m 641\u001B[39m \u001B[38;5;28;01mreturn\u001B[39;00m PeriodProperties(data, orig)\n\u001B[32m--> \u001B[39m\u001B[32m643\u001B[39m \u001B[38;5;28;01mraise\u001B[39;00m \u001B[38;5;167;01mAttributeError\u001B[39;00m(\u001B[33m\"\u001B[39m\u001B[33mCan only use .dt accessor with datetimelike values\u001B[39m\u001B[33m\"\u001B[39m)\n", | |
| "\u001B[31mAttributeError\u001B[39m: Can only use .dt accessor with datetimelike values" | |
| ] | |
| } | |
| ], | |
| "execution_count": 6 | |
| } | |
| ], | |
| "metadata": { | |
| "kernelspec": { | |
| "display_name": "Python 3", | |
| "language": "python", | |
| "name": "python3" | |
| }, | |
| "language_info": { | |
| "codemirror_mode": { | |
| "name": "ipython", | |
| "version": 2 | |
| }, | |
| "file_extension": ".py", | |
| "mimetype": "text/x-python", | |
| "name": "python", | |
| "nbconvert_exporter": "python", | |
| "pygments_lexer": "ipython2", | |
| "version": "2.7.6" | |
| } | |
| }, | |
| "nbformat": 4, | |
| "nbformat_minor": 5 | |
| } |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment