Created
January 19, 2022 08:26
-
-
Save s10018/b6cc2734074823e4ff73c6ae070865dd to your computer and use it in GitHub Desktop.
Google Fit データの取得(自分用)
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| # -*- coding: utf-8 -*- | |
| """ | |
| ※※ 無駄コード多数 | |
| """ | |
| import os | |
| import json | |
| import time | |
| import datetime | |
| from typing import Callable, TypedDict, Union, Optional, cast | |
| from googleapiclient.discovery import build | |
| from httplib2 import Http | |
| from oauth2client import file, client, tools | |
| import pandas as pd | |
| from tap import Tap | |
# OAuth scopes needed to read activity and body (weight) data from the
# Google Fit REST API.
SCOPES = [
    'https://www.googleapis.com/auth/fitness.activity.read',
    'https://www.googleapis.com/auth/fitness.body.read',
]
# Data-source id of the merged weight stream exposed by Google Fit.
DATA_SOURCE_WEIGHT = "derived:com.google.weight:com.google.android.gms:merge_weight"
class Value(TypedDict):
    """One value entry of a Fit data point.

    Presumably only the field matching the point's data type is present
    (``parse_weight_data`` probes for ``fpVal`` with ``in``) — TODO confirm
    against the REST API's Value object.
    """
    intVal: int
    fpVal: float
    stringVal: str
    mapVal: list[dict]
class DataDetail(TypedDict):
    """A single data point from the Fit API.

    The *Nanos / *Millis fields are epoch timestamps encoded as strings
    (``get_ns`` parses ``startTimeNanos`` numerically).
    """
    startTimeNanos: str
    endTimeNanos: str
    dataTypeName: str
    originDataSourceId: str
    value: list[Value]
    modifiedTimeMillis: str
class DataDict(TypedDict):
    """Response shape of a ``datasets().get`` call: the requested time
    range plus the list of points inside it."""
    minStartTimeNs: str
    maxEndTimeNs: str
    dataSourceId: str
    point: list[DataDetail]
class DataList(TypedDict):
    """Response shape of a ``dataSources().list`` call."""
    dataSource: list[dict]
def get_service():
    """Return an authorized Google Fit API client.

    Credentials are cached in ``token.json``; when absent or invalid an
    OAuth flow is run against ``./client_secret.json``.
    """
    token_store = file.Storage('token.json')
    credentials = token_store.get()
    if not credentials or credentials.invalid:
        oauth_flow = client.flow_from_clientsecrets('./client_secret.json', SCOPES)
        credentials = tools.run_flow(oauth_flow, token_store)
    # API reference: https://developers.google.com/fit/rest/v1/data-sources
    # (endpoint: https://www.googleapis.com/fitness/v1/users/me/dataSources)
    return build('fitness', 'v1', http=credentials.authorize(Http()))
def get_ns(nnn: str) -> datetime.datetime:
    """Convert a nanosecond-epoch string to an Asia/Tokyo datetime.

    Args:
        nnn: epoch time in nanoseconds, as a decimal string
             (e.g. ``startTimeNanos`` from the Fit API).

    Returns:
        A timezone-aware datetime (pandas Timestamp) in Asia/Tokyo.

    Fix: the previous implementation parsed via ``float(nnn)``, which
    loses precision — nanosecond epochs (~1.6e18) exceed a float's
    53-bit mantissa. Parse as ``int`` to keep the value exact;
    ``pd.to_datetime`` treats numeric input as nanoseconds by default.
    """
    d: datetime.datetime = pd.to_datetime(int(nnn), utc=True).tz_convert('Asia/Tokyo')
    return d
def now_date() -> tuple[int, int, int]:
    """Return today's date in JST (UTC+9) as a ``(year, month, day)`` tuple.

    Fix: the previous docstring ("get UNIX epochs padding 19 zero") was
    wrong — this function never produces an epoch value.
    """
    jst = datetime.timezone(datetime.timedelta(hours=9), name='JST')
    now = datetime.datetime.now(jst)
    return (now.year, now.month, now.day)
def fetch_data(do_update: bool, filename: str, do: Callable) -> Union[DataList, DataDict]:
    """Fetch Fit data and cache it, or read the cache.

    When ``do_update`` is true, ``do`` is called with the user's
    dataSources collection, the request is executed, and the JSON result
    is written to ``filename``. Otherwise the cached JSON in ``filename``
    is loaded and returned.
    """
    results: Optional[Union[DataList, DataDict]]
    if do_update:
        data_sources = get_service().users().dataSources()
        results = do(data_sources).execute()
        with open(filename, "w") as handle:
            handle.write(json.dumps(results))
    else:
        with open(filename, "r") as handle:
            results = json.load(handle)
    # Guard against a cache file containing JSON null.
    assert results is not None
    return results
def get_datalist_data(do_update: bool, result_fileprefix: str) -> DataList:
    """List the user's Fit data sources (cached as ``<prefix>list.json``)."""
    cache_path = result_fileprefix + "list.json"
    listing = fetch_data(do_update, cache_path, lambda src: src.list(userId="me"))
    return cast(DataList, listing)
def get_dataset_fetch_data(do_update: bool, sourcename: str, result_filename: str) -> DataDict:
    """Fetch the dataset of ``sourcename`` and return it (cached in
    ``result_filename``).

    The dataset id spans a fixed start epoch (1510448128000000000 ns,
    i.e. around 2017-11) up to the current time in nanoseconds.
    """
    def make_request(src):
        dataset_id = "1510448128000000000-" + str(time.time_ns())
        return src.datasets().get(
            userId='me', dataSourceId=sourcename, datasetId=dataset_id
        )

    return cast(DataDict, fetch_data(do_update, result_filename, make_request))
def parse_weight_data(fetch_results: DataDict) -> dict[tuple[int, int, int], list[float]]:
    """Group weight readings by JST calendar day.

    Args:
        fetch_results: a ``datasets().get`` response for the weight source.

    Returns:
        Mapping of ``(year, month, day)`` (Asia/Tokyo, from the point's
        start time) to the list of ``fpVal`` readings for that day.

    Fixes: the local variable previously named ``time`` shadowed the
    imported ``time`` module, and the day key was recomputed three times;
    ``setdefault`` replaces the membership-check-then-index pattern.
    """
    data_result: dict[tuple[int, int, int], list[float]] = {}
    for data in fetch_results["point"]:
        value = data['value']
        # Only points carrying a floating-point value are weight readings.
        if "fpVal" in value[0]:
            stamp = get_ns(data['startTimeNanos'])
            day_key = (stamp.year, stamp.month, stamp.day)
            data_result.setdefault(day_key, []).append(value[0]["fpVal"])
    return data_result
class MyParser(Tap):
    """Command-line options (typed-argument-parser / Tap).

    Attributes:
        do_update: when True, hit the Fit API and refresh the JSON cache;
            otherwise read from the cache.
        result_fileprefix: prefix for the cache file names.
        last_date: ``(year, month, day)``; defaults to today's JST date.
    """
    do_update: bool = False
    result_fileprefix: str
    last_date: tuple[int, int, int]

    def configure(self):
        # Map short/long flags onto the typed attributes above.
        self.add_argument('-u', '--do-update', action="store_true", default=False)
        self.add_argument('-r', '--result-fileprefix', default="result-")
        # Default is today's JST date, computed when configure() runs.
        self.add_argument('-d', '--last-date', default=now_date())
def do_weight(args):
    """Print the minimum weight recorded on each day, in date order."""
    raw = get_dataset_fetch_data(
        args.do_update, DATA_SOURCE_WEIGHT, args.result_fileprefix + "weight.json"
    )
    per_day = parse_weight_data(raw)
    for day, weights in sorted(per_day.items()):
        print(day, min(weights))
def main():
    # Parse CLI options and report the daily minimum weights.
    args = MyParser().parse_args()
    do_weight(args)


if __name__ == '__main__':
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment