Minor cleanup

parent 8c5db56796
commit da1ba3cfd5

4 changed files with 169 additions and 30 deletions

.gitignore (vendored) · 141 additions

@@ -3,3 +3,144 @@ venv/
 service_account.json
+
 config.ini
+
 .vscode
+
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/

README.md

@@ -27,7 +27,7 @@ RANGE_NAME = #METOO
 
 5. Give the service account's email (read) access to the sheet
 
-6. `python app.py`
+6. `python src/app.py`
 
 ## TODO
 
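Note on the config file these steps assume: src/app.py reads both values from a [Google] section, and the hunk header above shows the README's own example range. A minimal config.ini sketch, with a placeholder sheet ID (not a real value):

    [Google]
    ; long ID from the sheet's URL (placeholder below)
    SHEET_ID = your-spreadsheet-id
    ; A1-notation range or tab name; the README example uses #METOO
    RANGE_NAME = #METOO
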
src/app.py (new file) · 22 additions

@@ -0,0 +1,22 @@
+from sheet_data import google_sheet_to_json
+
+from flask import Flask
+from flask_cors import CORS, cross_origin
+app = Flask(__name__)
+cors = CORS(app)
+app.config['CORS_HEADERS'] = 'Content-Type'
+
+import configparser
+CONFIG = configparser.ConfigParser()
+CONFIG.read("config.ini")
+
+SPREADSHEET_ID = CONFIG["Google"]["SHEET_ID"]
+RANGE_NAME = CONFIG["Google"]["RANGE_NAME"]
+
+@app.route('/data.json')
+@cross_origin()
+def data_json():
+    return google_sheet_to_json(SPREADSHEET_ID, RANGE_NAME)
+
+if __name__ == "__main__":
+    app.run()

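A quick way to sanity-check the new wiring without deploying is Flask's built-in test client. A minimal sketch, assuming config.ini and service_account.json sit in the working directory:

    # Smoke test for the /data.json route (run from src/ so the import resolves).
    from app import app

    with app.test_client() as client:
        response = client.get('/data.json')
        print(response.status_code)  # expect 200
        print(response.data[:200])   # JSON string produced by google_sheet_to_json
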
src/sheet_data.py

@@ -3,33 +3,18 @@ from httplib2 import Http
 from google.oauth2 import service_account
 import json
 
-# Flask shizzle
-from flask import Flask
-from flask_cors import CORS, cross_origin
-app = Flask(__name__)
-cors = CORS(app)
-app.config['CORS_HEADERS'] = 'Content-Type'
-
-import configparser
-CONFIG = configparser.ConfigParser()
-CONFIG.read("config.ini")
-
-
-SPREADSHEET_ID = CONFIG["Google"]["SHEET_ID"]
-RANGE_NAME = CONFIG["Google"]["RANGE_NAME"]
-
-def get_google_sheet():
+def get_google_sheet(spreadsheet_id, range_name):
     """ Retrieve sheet data using OAuth credentials and Google Python API. """
     credentials = service_account.Credentials.from_service_account_file('service_account.json')
     scoped_credentials = credentials.with_scopes(['https://www.googleapis.com/auth/spreadsheets.readonly'])
     service = build('sheets', 'v4', credentials=scoped_credentials)
 
     # Call the Sheets API
-    gsheet = service.spreadsheets().values().get(spreadsheetId=SPREADSHEET_ID, range=RANGE_NAME).execute()
+    gsheet = service.spreadsheets().values().get(spreadsheetId=spreadsheet_id, range=range_name).execute()
     return gsheet
 
-def google_sheet_to_json():
-    sheet = get_google_sheet()
+def google_sheet_to_json(spreadsheet_id, range_name):
+    sheet = get_google_sheet(spreadsheet_id, range_name)
     data = sheet["values"]
     return json.dumps([create_point(row) for row in data[1:]])
 

@@ -61,13 +46,4 @@ def create_point(row):
             "extra": extra,
             "type": location_type,
         }
     }
-
-@app.route('/data.json')
-@cross_origin()
-def data_json():
-    return google_sheet_to_json()
-
-
-if __name__ == "__main__":
-    app.run()
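
The net effect of this refactor is that sheet_data no longer owns the Flask app or the config globals; it exposes plain functions that take the spreadsheet ID and range as arguments, so the module can be reused outside the web app. A sketch with placeholder arguments, assuming service_account.json is present:

    # Reuse the refactored module directly, without starting Flask.
    from sheet_data import google_sheet_to_json

    points_json = google_sheet_to_json('your-spreadsheet-id', '#METOO')  # placeholder values
    print(points_json)  # JSON array: one point per sheet row, header row skipped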