I hereby claim:
- I am ghodsizadeh on github.
- I am mghodsizadeh (https://keybase.io/mghodsizadeh) on keybase.
- I have a public key whose fingerprint is 8F84 3C83 D14A 1591 ABF9 D494 BDF1 6EFF 8CFC 5574
To claim this, I am signing this object:
| # run `uv run https://gist.githubusercontent.com/ghodsizadeh/476e1d3833ac5bca817125c0c59294ce/raw/e33e56a99d10bee2b1a4f432cb9456c478ee50ed/amsterdam_weather.py` | |
| # and you'll see the result | |
| # /// script | |
| # dependencies = [ | |
| # "requests<3", | |
| # "rich", | |
| # ] | |
| # /// | |
| import requests | |
| from rich.console import Console |
I hereby claim:
To claim this, I am signing this object:
| #!/bin/bash | |
| # Inspired by | |
| # https://www.codeenigma.com/community/blog/using-mdbtools-nix-convert-microsoft-access-mysql | |
| # USAGE | |
| # Rename your MDB file to migration-export.mdb | |
| # run ./mdb2sqlite.sh migration-export.mdb | |
| # wait and wait a bit longer... | |
| version: '3.9' | |
| services: | |
| metabase-secrets: | |
| image: metabase/metabase:latest | |
| container_name: metabase-secrets | |
| hostname: metabase-secrets | |
| volumes: | |
| - /dev/urandom:/dev/random:ro | |
| ports: | |
| - 3000:3000 |
| import pandas as pd | |
| import requests | |
| def get_raw_data(stock_id): | |
| ''' | |
| get raw data of live price request | |
| ''' | |
| url = f'http://www.tsetmc.com/tsev2/data/instinfodata.aspx?i={stock_id}&c=70%20' |
| from sklearn.linear_model import LogisticRegression | |
| lr = LogisticRegression( C =100.0, random_state=1, solver='lbfgs',multi_class='ovr') | |
| ## uncomment following line to check multi_class option | |
| # lr = LogisticRegression( C =100.0, random_state=1, solver='lbfgs',multi_class='multinomial') | |
| lr.fit(X_train_std, y_train) | |
| lr.score(X_test_std, y_test) |
| from sklearn.preprocessing import StandardScaler | |
| sc = StandardScaler() | |
| sc.fit(X_train) | |
| X_train_std = sc.transform(X_train) | |
| X_test_std = sc.transform(X_test) |
| from sklearn import datasets | |
| iris = datasets.load_iris() | |
| X = iris.data[:,[2,3]] # just get two features of iris dataset, to plot decision boundary | |
| y = iris.target | |
| print('Class Labels', np.unique(y)) | |
| from sklearn.model_selection import train_test_split |
| from sklearn import datasets | |
| import numpy as np | |
| import matplotlib.pyplot as plt |