Initial commit
commit f35779de6d
11 changed files with 555 additions and 0 deletions
.gitignore (vendored, new file, 104 lines)
@@ -0,0 +1,104 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
.static_storage/
.media/
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/
CHANGELOG.md (new file, 14 lines)
@@ -0,0 +1,14 @@
# Changelog
All notable changes to this project will be documented in this file.

The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).

## [Unreleased]

### Added
### Changed
### Deprecated
### Removed
### Fixed
### Security
README.md (new file, 26 lines)
@@ -0,0 +1,26 @@
# pancakecounter

Use `MM_USERNAME=username MM_PASSWORD=password ./read_mattermost.py --live > verifications.log` to connect to Mattermost and read the relevant emoji events.
Drop the `--live` if you just want to read once and don't want to keep watching.

Use `./make_table.py < verifications.log` to create a table based on these events.

As an alternative to `MM_USERNAME=username MM_PASSWORD=password` you can also use `MM_ACCESS_TOKEN=token`.

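For illustration only, here is what a few lines of `verifications.log` could look like; the usernames, post IDs and timestamps below are made up, but the line format is the one `read_mattermost.py` prints and `make_table.py` parses:

```
alice abc123 at 2020-11-02T18:00:00.000000+00:00 verified by midgard at 2020-11-02T18:05:00.000000+00:00
alice def456 at 2020-11-03T08:30:00.000000+00:00 verified by midgard at 2020-11-03T09:00:00.000000+00:00
bob ghi789 at 2020-11-03T12:00:00.000000+00:00 verified by midgard at 2020-11-03T12:01:00.000000+00:00
```

`make_table.py` then prints one line per awardee with their number of verified posts:

```
alice: 2
bob: 1
```
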
## Installation
Install the requirements in `requirements.txt` with `pip install -r requirements.txt`.

If you want to create a virtualenv in this directory and install the dependencies in it, you could
instead use
```
tools/create_venv.sh
```

Activate the virtualenv with `source venv/bin/activate`. To make this easier, you could create
an [alias][] `alias venv='source venv/bin/activate'` in your shell.

[alias]: https://www.computerworld.com/article/2598087/how-to-use-aliases-in-linux-shell-commands.html

## Development
If you introduce dependencies, list them in `setup.py` under `install_requires`, and run
`tools/update_requirements.sh`.

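A minimal sketch of that workflow (the dependency name is only an example):

```
# 1. Add the new dependency, e.g. "requests", to install_requires in setup.py.
# 2. Regenerate the pinned requirements:
tools/update_requirements.sh
# 3. Review the regenerated pins before committing:
git diff requirements.txt
```
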
make_table.py (executable, new file, 42 lines)
@@ -0,0 +1,42 @@
#!/usr/bin/env python3

import sys
import re
from datetime import datetime
from collections import defaultdict
from functools import partial

if sys.stdin.isatty():
    print("Hint: stdin is a terminal, you may want to do `./make_table.py < verifications.log` instead.", file=sys.stderr)

users = defaultdict(partial(defaultdict, dict))

for line_nr, line in enumerate(sys.stdin, start=1):
    line = line.rstrip()

    m = re.fullmatch(r"([^ ]+) ([^ ]+) at ([^ ]+) verified by ([^ ]+) at ([^ ]+)", line)
    if m:
        awardee = m.group(1)
        post_id = m.group(2)
        post_time = datetime.fromisoformat(m.group(3))
        verifier = m.group(4)
        verification_time = datetime.fromisoformat(m.group(5))

        users[awardee][post_id][verifier] = verification_time
        continue

    m = re.fullmatch(r"([^ ]+) ([^ ]+) verification removed by ([^ ]+)", line)
    if m:
        awardee = m.group(1)
        post_id = m.group(2)
        verifier = m.group(3)

        try:
            del users[awardee][post_id][verifier]
        except KeyError:
            print(f"Trying to remove non-existing verification by {verifier}, looks like the file is corrupt!", file=sys.stderr)

        continue

for username, user_posts in users.items():
    print(f"{username}: {len(user_posts)}")
read_mattermost.py (executable, new file, 202 lines)
@@ -0,0 +1,202 @@
#!/usr/bin/env python3

import sys
import os
from collections import defaultdict
import datetime
import threading
from time import sleep
import json
import mattermost
import mattermost.ws

SERVER = "mattermost.zeus.gent"
TEAM_NAME = "zeus"
CHAN_NAME = "pannenkoeken"
EMOJI_NAME = "pancakes"
SINCE = datetime.datetime.strptime("2020-11-01 00:00", "%Y-%m-%d %H:%M") \
    .astimezone(datetime.timezone.utc)
TAGGERS = [
    # Board
    "flynn",
    "bobby",
    "pcassima",
    "redfast00",
    "francis.",
    "hannes",
    "arnhoudt",
    "mel",

    # Sneaky backdoor (actually just for testing and if you read this, nobody has removed it)
    "midgard",
]

TOKEN = os.getenv("MM_ACCESS_TOKEN")
USER = os.getenv("MM_USERNAME")
PASSWORD = os.getenv("MM_PASSWORD")


def first(iterable, default=None):
    for x in iterable:
        return x
    return default


def get_posts_for_channel(mmapi, channel_id, since, **kwargs):
    after = None
    while True:
        data_page = mmapi._get("/v4/channels/"+channel_id+"/posts", params=(
            { "after": after }
            if after else
            { "since": to_mm_timestamp(since) }
        ), **kwargs)

        order = list(reversed(data_page["order"]))
        for post_id in order:
            yield data_page["posts"][post_id]

        if not order:
            return
        after = order[-1]


##################################
# Log in
mm = mattermost.MMApi(f"https://{SERVER}/api")

if TOKEN:
    mm.login(bearer=TOKEN)
else:
    assert USER
    assert PASSWORD
    mm.login(USER, PASSWORD)


##################################
# Get channel
team_data = first(filter(lambda team: team["name"] == TEAM_NAME, mm.get_teams()))
assert team_data, "Team should exist"

channel_data = first(filter(lambda chan: chan["name"] == CHAN_NAME, mm.get_team_channels(team_data["id"])))
assert channel_data, "Channel should exist"
channel = channel_data["id"]


##################################
# Get users

# People who are authorized to grant pancakes
tagger_ids = {u["id"]: u["username"] for u in mm.get_users_by_usernames_list(TAGGERS)}


users = {u["id"]: u for u in mm.get_users(in_channel=channel)}

for user in users.values():
    assert user["username"].find(" ") == -1, f"{user['username']} shouldn't have spaces in username"

def get_username(userid):
    # When someone joined later
    if userid not in users:
        users[userid] = mm.get_user(userid)

    return users[userid]["username"]


##################################
# Get posts
posts = {}
def get_post(postid):
    if postid not in posts:
        posts[postid] = mm.get_post(postid)

    return posts[postid]


def parse_mm_timestamp(mm_timestamp):
    return datetime.datetime.fromtimestamp(mm_timestamp / 1000, datetime.timezone.utc)

def to_mm_timestamp(dt):
    return int(dt.timestamp() * 1000)


def reaction_qualifies(reaction):
    return reaction["emoji_name"] == EMOJI_NAME and reaction["user_id"] in tagger_ids


awarded = defaultdict(set)
def award_if_appropriate(reaction):
    if not reaction_qualifies(reaction):
        return

    post = get_post(reaction["post_id"])
    if parse_mm_timestamp(post["create_at"]) < SINCE:
        return

    awarder_id = reaction["user_id"]
    if awarder_id in awarded[post["id"]]:
        return
    awarded[post["id"]].add(awarder_id)

    reaction_time = parse_mm_timestamp(reaction["create_at"]).isoformat(timespec="microseconds")
    post_time = parse_mm_timestamp(post["create_at"]).isoformat(timespec="microseconds")
    awardee = get_username(post["user_id"])
    awarder = get_username(awarder_id)
    print(f"{awardee} {post['id']} at {post_time} verified by {awarder} at {reaction_time}", flush=True)


def retract_if_appropriate(reaction):
    if not reaction_qualifies(reaction):
        return

    post = get_post(reaction["post_id"])
    if parse_mm_timestamp(post["create_at"]) < SINCE:
        return

    awarder_id = reaction["user_id"]
    awarded[post["id"]].discard(awarder_id)

    awardee = get_username(post["user_id"])
    awarder = get_username(awarder_id)
    print(f"{awardee} {post['id']} verification removed by {awarder}", flush=True)


def handle_backlog(since):
    for post in get_posts_for_channel(mm, channel, since):
        for reaction in post.get("metadata", {}).get("reactions", []):
            award_if_appropriate(reaction)


def handle_live():
    def ws_handler(mmws, event_data):
        if event_data["broadcast"]["channel_id"] != channel:
            return

        if event_data["event"] == "reaction_added":
            award_if_appropriate(json.loads(event_data["data"]["reaction"]))
        elif event_data["event"] == "reaction_removed":
            retract_if_appropriate(json.loads(event_data["data"]["reaction"]))

    ws = mattermost.ws.MMws(ws_handler, mm, f"wss://{SERVER}/api/v4/websocket")
    while True:
        sleep(60 * 1000)


live = "--live" in sys.argv[1:]

# Note: skipping this step and updating an existing file would be dangerous: you would miss revocations that happened while not listening.
handle_backlog(SINCE)

if sys.stdout.isatty():
    print("To use this data, redirect stdout to a file and use make_table.py on it.", file=sys.stderr)

if live:
    print("Now watching for live posts.", file=sys.stderr)
    handle_live()

else:
    print("Use --live to keep watching new posts.", file=sys.stderr)


# Logout
if not TOKEN:
    mm.revoke_user_session()
requirements.txt (new file, 51 lines)
@@ -0,0 +1,51 @@
#
# This file is autogenerated. To update, run:
# tools/update_requirements.sh
#
certifi==2020.11.8 \
    --hash=sha256:1f422849db327d534e3d0c5f02a263458c3955ec0aae4ff09b95f195c59f4edd \
    --hash=sha256:f05def092c44fbf25834a51509ef6e631dc19765ab8a57b4e7ab85531f0a9cf4 \
    # via requests
chardet==3.0.4 \
    --hash=sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae \
    --hash=sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691 \
    # via requests
idna==2.10 \
    --hash=sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6 \
    --hash=sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0 \
    # via requests
mattermost==5.29.1 \
    --hash=sha256:0e12fef4c510bd92629fac9bdee41ecab9cce879c87d8e63905102370aba7c08 \
    # via pancakecounter (setup.py)
requests==2.25.0 \
    --hash=sha256:7f1a0b932f4a60a1a65caa4263921bb7d9ee911957e0ae4a23a6dd08185ad5f8 \
    --hash=sha256:e786fa28d8c9154e6a4de5d46a1d921b8749f8b74e28bde23768e5e16eece998 \
    # via mattermost
urllib3==1.26.2 \
    --hash=sha256:19188f96923873c92ccb987120ec4acaa12f0461fa9ce5d3d0772bc965a39e08 \
    --hash=sha256:d8ff90d979214d7b4f8ce956e80f4028fc6860e4431f731ea4a8c08f23f99473 \
    # via requests
websockets==8.1 \
    --hash=sha256:0e4fb4de42701340bd2353bb2eee45314651caa6ccee80dbd5f5d5978888fed5 \
    --hash=sha256:1d3f1bf059d04a4e0eb4985a887d49195e15ebabc42364f4eb564b1d065793f5 \
    --hash=sha256:20891f0dddade307ffddf593c733a3fdb6b83e6f9eef85908113e628fa5a8308 \
    --hash=sha256:295359a2cc78736737dd88c343cd0747546b2174b5e1adc223824bcaf3e164cb \
    --hash=sha256:2db62a9142e88535038a6bcfea70ef9447696ea77891aebb730a333a51ed559a \
    --hash=sha256:3762791ab8b38948f0c4d281c8b2ddfa99b7e510e46bd8dfa942a5fff621068c \
    --hash=sha256:3db87421956f1b0779a7564915875ba774295cc86e81bc671631379371af1170 \
    --hash=sha256:3ef56fcc7b1ff90de46ccd5a687bbd13a3180132268c4254fc0fa44ecf4fc422 \
    --hash=sha256:4f9f7d28ce1d8f1295717c2c25b732c2bc0645db3215cf757551c392177d7cb8 \
    --hash=sha256:5c01fd846263a75bc8a2b9542606927cfad57e7282965d96b93c387622487485 \
    --hash=sha256:5c65d2da8c6bce0fca2528f69f44b2f977e06954c8512a952222cea50dad430f \
    --hash=sha256:751a556205d8245ff94aeef23546a1113b1dd4f6e4d102ded66c39b99c2ce6c8 \
    --hash=sha256:7ff46d441db78241f4c6c27b3868c9ae71473fe03341340d2dfdbe8d79310acc \
    --hash=sha256:965889d9f0e2a75edd81a07592d0ced54daa5b0785f57dc429c378edbcffe779 \
    --hash=sha256:9b248ba3dd8a03b1a10b19efe7d4f7fa41d158fdaa95e2cf65af5a7b95a4f989 \
    --hash=sha256:9bef37ee224e104a413f0780e29adb3e514a5b698aabe0d969a6ba426b8435d1 \
    --hash=sha256:c1ec8db4fac31850286b7cd3b9c0e1b944204668b8eb721674916d4e28744092 \
    --hash=sha256:c8a116feafdb1f84607cb3b14aa1418424ae71fee131642fc568d21423b51824 \
    --hash=sha256:ce85b06a10fc65e6143518b96d3dca27b081a740bae261c2fb20375801a9d56d \
    --hash=sha256:d705f8aeecdf3262379644e4b55107a3b55860eb812b673b28d0fbc347a60c55 \
    --hash=sha256:e898a0863421650f0bebac8ba40840fc02258ef4714cb7e1fd76b6a6354bda36 \
    --hash=sha256:f8a7bff6e8664afc4e6c28b983845c5bc14965030e3fb98789734d416af77c4b \
    # via mattermost
setup.py (executable, new file, 51 lines)
@@ -0,0 +1,51 @@
#!/usr/bin/env python3

import setuptools


with open("README.md", "r") as fh:
    long_description = fh.read()


setuptools.setup(
    name="pancakecounter",
    version="0.1",
    author="Midgard",
    author_email="",
    description="Count pancake emoji reactions",
    long_description=long_description,
    long_description_content_type="text/markdown",
    scripts=["make_table.py", "read_mattermost.py"],

    url="https://git.zeus.gent/kelder/pancakecounter",
    project_urls={
        "Source": "https://git.zeus.gent/kelder/pancakecounter",
        "Change log": "https://git.zeus.gent/kelder/pancakecounter/-/blob/master/CHANGELOG.md",
        "Bug tracker": "https://git.zeus.gent/kelder/pancakecounter/-/issues",
    },

    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
        "Operating System :: OS Independent",
        "Natural Language :: English",
        "Environment :: Console",

        # "Development Status :: 1 - Planning",
        # "Development Status :: 2 - Pre-Alpha",
        # "Development Status :: 3 - Alpha",
        "Development Status :: 4 - Beta",
        # "Development Status :: 5 - Production/Stable",
        # "Development Status :: 6 - Mature",
        # "Development Status :: 7 - Inactive",

        "Intended Audience :: End Users/Desktop",
        "Topic :: Utilities",
    ],

    packages=setuptools.find_packages(),
    python_requires=">=3.7",
    install_requires=[
        "mattermost",
    ],
)
tools/clean.sh (executable, new file, 4 lines)
@@ -0,0 +1,4 @@
#!/bin/sh

cd "`dirname "$0"`"/..
rm -rf ./build/ ./*.egg-info/ ./dist/ ./__pycache__/ ./*/__pycache__/
tools/create_venv.sh (executable, new file, 8 lines)
@@ -0,0 +1,8 @@
#!/bin/sh

cd "`dirname "$0"`"/..

# Create virtualenv
python3 -m virtualenv venv/
# Install dependencies
venv/bin/pip install -e .
tools/release.sh (executable, new file, 37 lines)
@@ -0,0 +1,37 @@
#!/bin/bash

set -e

cd $(dirname "$0")/..

tools/test.sh

if [ ! -t 0 ] ; then
    echo "release.sh should be run with a terminal attached to stdin" >&2
    exit 1
fi

git status

echo -n "Previous version: v"
./setup.py --version
read -p "Enter new version: v" version

sed -i 's/version=".*"/version="'"$version"'"/' setup.py
sed -i 's/## \[Unreleased\]/&\n\n## ['"$version"'] - '"$(date --utc +%Y-%m-%d)"'/' CHANGELOG.md
echo; echo "Inspect CHANGELOG..."
${EDITOR:-nano} CHANGELOG.md
git add setup.py CHANGELOG.md
git commit -m "Bump version to $version"

tagid=v"$version"
echo "Creating git tag $tagid"
git tag -s -m "Version $version" "$tagid"

./setup.py sdist bdist_wheel

read -p "Upload to Git and PyPI? (y/N) " confirm
if [ ! "$confirm" = y ]; then "Abort"; exit 1; fi

python3 -m twine upload dist/*-${version}*
git push origin "$tagid" master
tools/update_requirements.sh (executable, new file, 16 lines)
@@ -0,0 +1,16 @@
#!/bin/sh

cd "`dirname "$0"`"/..

if [ ! -f venv/bin/pip-compile ]; then
    venv/bin/pip install pip-tools
fi

cat <<EOF > requirements.txt
#
# This file is autogenerated. To update, run:
# tools/update_requirements.sh
#
EOF
venv/bin/pip-compile --quiet --generate-hashes --annotate --no-header --output-file="-" >> requirements.txt
echo "Updated requirements.txt"