Compare commits


1 commit

Author SHA1 Message Date
Midgard 285215b0f6 [WIP] Add lschan 2021-06-02 11:13:00 +02:00
14 changed files with 398 additions and 1119 deletions

105
.gitignore vendored
View file

@@ -1,104 +1,3 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/
# Translations
*.mo
*.pot
# Django stuff:
*.log
.static_storage/
.media/
local_settings.py
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
target/
# Jupyter Notebook
.ipynb_checkpoints
# pyenv
.python-version
# celery beat schedule file
celerybeat-schedule
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
*.pyc
*.pyo

CHANGELOG.md
View file

@@ -1,8 +0,0 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [Unreleased]
Initial release

README.md
View file

@@ -1,18 +0,0 @@
# mmcli
## Install
With pip or pipx.
## Development
To create a virtualenv and install the dependencies in it:
```
tools/create_venv.sh
```
Activate the virtualenv with `source venv/bin/activate`. To make this easier, you could create
an [alias][] `alias venv='source venv/bin/activate'` in your shell.
[alias]: https://www.computerworld.com/article/2598087/how-to-use-aliases-in-linux-shell-commands.html
If you introduce dependencies, list them in `pyproject.toml` under `dependencies`, and run
`tools/update_requirements.sh`.

396
mmcli.py Executable file
View file

@@ -0,0 +1,396 @@
#!/usr/bin/env python3
import sys
import argparse
import os
import json
from typing import Dict, Optional, List, Iterable
import re
from time import sleep
import threading
import mattermost
from mmws import MMws
class NotFound(Exception):
def __init__(self, type_: str, name: str):
super().__init__(f"{type_} {name} not found")
self.type = type_
self.name = name
def first(iterable, default=None):
for x in iterable:
return x
return default
def yes_no(x):
return "yes" if x else "no"
def http_to_ws(url):
"""
Transform url from http to ws and https to wss
"""
assert url.startswith("http://") or url.startswith("https://")
return "ws" + url[4:]
def get_posts_for_channel(self, channel_id: str, progress=lambda x: None, after=None, since=None, **kwargs) -> Iterable[Dict]:
"""
@raises ApiException: Passed on from lower layers.
"""
per_page = 200
page = 0
total = 0
# if after and since:
# raise ValueError("after and since cannot be used together")
if since:
raise Exception("'since' functionality is broken in the API and behaves non-deterministically. It cannot be meaningfully used.")
# Posts in channel updated after a given timestamp: pagination is broken in the API
# current_since = since
# while True:
# data_page = self._get(f"/v4/channels/{channel_id}/posts", params={"since": current_since, **kwargs})
# order = data_page["order"]
# yield from (
# data_page["posts"][post_id]
# for post_id in reversed(order)
# )
# total += len(order)
# progress(total)
# if len(order) < 1000: # For some reason the pages go up to 1000 posts if 'since' is given
# break
# current_since = data_page["posts"][order[0]]["create_at"]
# sleep(0.1)
elif after:
# Posts in channel after a given ID: API gives pages with OLDEST messages first, so we can
# yield each page when it is fetched
while True:
data_page = self._get(f"/v4/channels/{channel_id}/posts", params={"page": page, "per_page": per_page, "after": after, **kwargs})
order = data_page["order"]
yield from (
data_page["posts"][post_id]
for post_id in reversed(order)
)
total += len(order)
progress(total)
if len(order) < per_page:
break
page += 1
sleep(0.1)
else:
# All posts in channel: API gives pages with NEWEST messages first, so reverse the order in
# the end (and don't reverse the order of each page separately)
posts = []
while True:
data_page = self._get(f"/v4/channels/{channel_id}/posts", params={"page": page, "per_page": per_page, **kwargs})
order = data_page["order"]
posts.extend(
data_page["posts"][post_id]
for post_id in order
)
progress(len(posts))
if len(order) < per_page:
break
page += 1
sleep(0.1)
yield from reversed(posts)
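# Example (illustrative): stream a channel's history oldest-first while reporting
# progress on stderr:
#   for post in get_posts_for_channel(mm_api, channel["id"], progress=lambda n: print(n, file=sys.stderr)):
#       print(post["message"])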
ID_PREFIX = "id:"
def predicate_for_query(query: str):
"""
@return: a function that returns whether `query` matches its argument
"""
if query.startswith(ID_PREFIX):
id_ = query[len(ID_PREFIX):]
return lambda x: x["id"] == id_
else:
return lambda x: x["name"] == query
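# Example (illustrative): queries match either by name or, with the "id:" prefix, by ID:
#   predicate_for_query("town-square")({"name": "town-square"})  # -> True
#   predicate_for_query("id:123abc456def789ghi012jkl34")({"id": "123abc456def789ghi012jkl34"})  # -> True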
def resolve_team(mm_api: mattermost.MMApi, query: str) -> Optional[Dict]:
return first(filter(
predicate_for_query(query),
mm_api.get_teams()
))
def resolve_channel(mm_api: mattermost.MMApi, team_id: str, query: str) -> Optional[Dict]:
return first(filter(
predicate_for_query(query),
mm_api.get_team_channels(team_id)
))
def resolve_team_channel(mm_api: mattermost.MMApi, query: str) -> Dict:
query_parts = query.split("/")
del query
if len(query_parts) != 2:
raise ValueError("Team/channel ID should be '<team>/<channel>'")
team = resolve_team(mm_api, query_parts[0])
if not team:
raise NotFound("team", query_parts[0])
channel = resolve_channel(mm_api, team["id"], query_parts[1])
if not channel:
raise NotFound("channel", query_parts[1])
return team, channel
def login(mm_api, parsed):
print(
f"Logging in as {parsed.user}; password provided: {yes_no(parsed.password)}; "
f"TOTP token provided: {yes_no(parsed.totp)}",
file=sys.stderr)
mm_api.login(parsed.login_id, parsed.password, parsed.totp)
if parsed.format == "json":
print(json.dumps({"token": mm_api._bearer}))
elif parsed.format == "tsv":
print(mm_api._bearer)
else:
assert False
def lschan(mm_api: mattermost.MMApi, parsed):
teams = {}
for team in mm_api.get_teams():
teams[team["id"]] = team
teams[team["id"]]["joined"] = False
for team in mm_api._get(f"/v4/users/me/teams"):
teams[team["id"]] = team
teams[team["id"]]["joined"] = True
# WIP: per team, public channels and the user's joined channels can be fetched with
# get_team_channels(team["id"]) and get_channels_for_user("me", team["id"]);
# output formatting still to be done
def cat(mm_api: mattermost.MMApi, parsed):
# channels = [
# resolve_team_channel(mm_api, query)
# for query in parsed.channels
# ]
team, channel = resolve_team_channel(mm_api, parsed.channel)
users = list(mm_api.get_users())
if not parsed.ids:
def attribute(key_value):
key, value = key_value
if key == "channel_id":
assert value == channel["id"]
return "channel", channel["name"]
if key == "user_id":
return "username", first(u["username"] for u in users if u["id"] == value)
return key_value
else:
def attribute(key_value):
return key_value
# In a list to allow overwriting from within print_initial_messages without using global
backlog = [ [] ]
backlog_lock = threading.Lock()
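# The backlog buffers posts that arrive over the websocket while the initial history
# is still being printed: print_initial_messages flushes it and sets backlog[0] to None,
# after which initial_websocket_callback hands new posts straight to simple_websocket_callback.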
def print_initial_messages():
posts = get_posts_for_channel(mm_api, channel["id"], after=parsed.after, since=parsed.since)
for post in posts:
print(str_for_post(attribute, post, parsed))
with backlog_lock:
for post in backlog[0]:
print(str_for_post(attribute, post, parsed))
backlog[0] = None
if parsed.follow:
def simple_websocket_callback(mmws, event_data):
if event_data.get("event") == "posted":
post = json.loads(event_data["data"]["post"])
if post["channel_id"] != channel["id"]:
return
print(str_for_post(attribute, post, parsed), flush=True)
def initial_websocket_callback(mmws: MMws, event_data):
if event_data.get("event") == "posted":
post = json.loads(event_data["data"]["post"])
if post["channel_id"] != channel["id"]:
return
with backlog_lock:
if backlog[0] is not None:
backlog[0].append(post)
return
else:
mmws.ws_handler = simple_websocket_callback
simple_websocket_callback(mmws, event_data)
ws_url = http_to_ws(mm_api._url) + "/v4/websocket"
mmws = MMws(initial_websocket_callback, mm_api.access_token, ws_url)
if parsed.follow:
thread = threading.Thread(target=print_initial_messages)
thread.setDaemon(True)
thread.start()
mmws.run_websocket()
else:
print_initial_messages()
def send(mm_api: mattermost.MMApi, parsed):
read_stdin = parsed.message is None or parsed.channel is None
team, channel = resolve_team_channel(mm_api, parsed.channel) if parsed.channel is not None else (None, None)
if read_stdin:
if sys.stdin.isatty():
print("Reading from tty. (You can type the message objects below. Or maybe you meant to redirect something to stdin.)", file=sys.stderr)
for line in sys.stdin:
msg = json.loads(line)
if "channel_id" in msg:
channel_id = msg["channel_id"]
elif "channel" in msg:
_, local_channel = resolve_team_channel(mm_api, msg["channel"])
channel_id = local_channel["id"]
elif channel is not None:
channel_id = channel["id"]
else:
print(f"Illegal message, missing channel: {line.strip()}", file=sys.stderr)
raise ValueError("Illegal message, missing channel")
sent = mm_api.create_post(channel_id, msg["message"], props={"from_mmcli": "true"}, filepaths=msg.get("attachments"))
print(sent)
else:
sent = mm_api.create_post(channel["id"], parsed.message, props={"from_mmcli": "true"}, filepaths=parsed.attach)
print(sent)
def str_for_post(attribute, post, parsed):
obj = {
k: v
for k, v in map(attribute, post.items())
if (v or k == "message") and (k != "update_at" or post["update_at"] != post["create_at"])
}
if parsed.format == "json":
return json.dumps(obj)
if parsed.format == "tsv":
msg = obj.get("message", "").replace("\\", "\\\\").replace("\t", r"\t").replace("\n", r"\n")
return f"{obj['id']}\t{obj['create_at']}\t{obj.get('username') or obj['user_id']}\t{msg}"
assert False
ACTIONS = {
"lschan": {"function": lschan},
"login": {"function": login, "accesstoken_required": False},
"cat": {"function": cat},
"send": {"function": send},
}
FORMATTERS = { "json", "tsv" }
ENVVAR_SERVER = "MM_SERVER"
ENVVAR_USERNAME = "MM_USERNAME"
ENVVAR_PASSWORD = "MM_PASSWORD"
ENVVAR_TOTP = "MM_TOTP"
ENVVAR_ACCESSTOKEN = "MM_ACCESSTOKEN"
def main():
prog_name = os.path.basename(sys.argv[0])
description = "Interact with Mattermost on the CLI"
epilog = f"""
For further help, use `{prog_name} <action> -h`.
Where a "URL name" is required, "id:" plus an ID can also be used instead. So these could both be valid:
town-square
id:123abc456def789ghi012jkl34
Hint: JSON output can be filtered on the command line with jq(1).
""".strip()
argparser = argparse.ArgumentParser(
prog_name, description=description, epilog=epilog,
formatter_class=argparse.RawTextHelpFormatter
)
argparser.add_argument("-i", "--ids", help="use IDs instead of names", action="store_true")
argparser.add_argument(
"--format", help="output format; only json has all fields; default: json", choices=FORMATTERS, default="json")
argparser.add_argument(
"--server",
help=f"e.g.: mattermost.example.org; example.org/mattermost; envvar: {ENVVAR_SERVER}",
default=os.getenv(ENVVAR_SERVER))
subparsers = argparser.add_subparsers(title="actions", dest="action", required=True)
parser_login = subparsers.add_parser("login", help="retrieve an access token")
parser_login.add_argument("login_id", help="username or email", default=os.getenv(ENVVAR_USERNAME))
parser_login.add_argument("--password", default=os.getenv(ENVVAR_PASSWORD))
parser_login.add_argument("--totp", default=os.getenv(ENVVAR_TOTP))
parser_cat = subparsers.add_parser("lschan", help="list channels")
parser_cat.add_argument("-f", "--follow", action="store_true", help="keep running, printing changes to channels as they come in")
# TODO support multiple channels
# parser_cat = subparsers.add_parser("cat", help="list messages in channel(s)")
# parser_cat.add_argument(
# "channels", nargs="+", help="URL names of team and channel: '<team>/<channel>'")
parser_cat = subparsers.add_parser("cat", help="list messages in channel")
parser_cat.add_argument("channel", help="URL names of team and channel: '<team>/<channel>'")
# ---
parser_cat.add_argument("--after", help="all after post with ID")
parser_cat.add_argument("--since", help="all after timestamp")
parser_cat.add_argument("-f", "--follow", action="store_true", help="keep running, printing new posts as they come in")
parser_send = subparsers.add_parser("send", help="send message(s)")
parser_send.add_argument(
"--channel", help="URL names of team and channel: '<team>/<channel>'; if not provided, "
"messages must be provided on stdin and each must specify channel")
parser_send.add_argument(
"--message", help="message; if not provided, messages will be expected on stdin")
parser_send.add_argument(
"--attach", nargs="+", help="filename of file to attach")
parsed = argparser.parse_args()
if not parsed.server:
argparser.error(
f"server is required; use argument --server or environment variable {ENVVAR_SERVER}")
access_token = os.getenv(ENVVAR_ACCESSTOKEN)
if ACTIONS[parsed.action].get("accesstoken_required", True) and not access_token:
argparser.error(
f"`{prog_name} {parsed.action}` requires access token; get one with `{prog_name} login` "
f"and set environment variable {ENVVAR_ACCESSTOKEN}")
server = parsed.server if re.match(r"^[a-z]+://", parsed.server) else f"https://{parsed.server}"
mm_api = mattermost.MMApi(f"{server}/api")
mm_api.access_token = access_token
if access_token:
mm_api._headers.update({"Authorization": f"Bearer {access_token}"})
ACTIONS[parsed.action]["function"](mm_api, parsed)
if __name__ == "__main__":
main()

View file

mmcli/mmcli.py
View file

@@ -1,665 +0,0 @@
#!/usr/bin/env python3
import sys
import argparse
import os
import json
from typing import Dict, Optional, List, Iterable
import re
from time import sleep
import threading
import mattermost
from .parsedt import parse_datetime_to_utc
from .mmws import MMws
class NotFound(Exception):
def __init__(self, type_: str, name: str):
super().__init__(f"{type_} {name} not found")
self.type = type_
self.name = name
def first(iterable, default=None):
for x in iterable:
return x
return default
def yes_no(x):
return "yes" if x else "no"
def http_to_ws(url):
"""
Transform url from http to ws and https to wss
"""
assert url.startswith("http://") or url.startswith("https://")
return "ws" + url[4:]
def warn_if_tty(you_can_type="the message objects", write_message_to=sys.stderr):
if sys.stdin.isatty():
print(f"Reading from tty. (You can type {you_can_type} below. Or maybe you meant to redirect something to stdin.)", file=write_message_to)
def get_posts_for_channel(self, channel_id: str, progress=lambda x: None, after=None, since=None, **kwargs) -> Iterable[Dict]:
"""
@raises ApiException: Passed on from lower layers.
"""
per_page = 200
page = 0
total = 0
# if after and since:
# raise ValueError("after and since cannot be used together")
if since:
raise Exception("'since' functionality is broken in the API and behaves non-deterministically. It cannot be meaningfully used.")
# Posts in channel updated after a given timestamp: pagination is broken in the API
# current_since = since
# while True:
# data_page = self._get(f"/v4/channels/{channel_id}/posts", params={"since": current_since, **kwargs})
# order = data_page["order"]
# yield from (
# data_page["posts"][post_id]
# for post_id in reversed(order)
# )
# total += len(order)
# progress(total)
# if len(order) < 1000: # For some reason the pages go up to 1000 posts if 'since' is given
# break
# current_since = data_page["posts"][order[0]]["create_at"]
# sleep(0.1)
elif after:
# Posts in channel after a given ID: API gives pages with OLDEST messages first, so we can
# yield each page when it is fetched
while True:
data_page = self._get(f"/v4/channels/{channel_id}/posts", params={"page": page, "per_page": per_page, "after": after, **kwargs})
order = data_page["order"]
yield from (
data_page["posts"][post_id]
for post_id in reversed(order)
)
total += len(order)
progress(total)
if len(order) < per_page:
break
page += 1
sleep(0.1)
else:
# All posts in channel: API gives pages with NEWEST messages first, so reverse the order in
# the end (and don't reverse the order of each page separately)
posts = []
while True:
data_page = self._get(f"/v4/channels/{channel_id}/posts", params={"page": page, "per_page": per_page, **kwargs})
order = data_page["order"]
posts.extend(
data_page["posts"][post_id]
for post_id in order
)
progress(len(posts))
if len(order) < per_page:
break
page += 1
sleep(0.1)
yield from reversed(posts)
ID_PREFIX = "id:"
def predicate_for_query(query: str):
"""
@return: a function that returns whether `query` matches its argument
"""
if query.startswith(ID_PREFIX):
id_ = query[len(ID_PREFIX):]
return lambda x: x["id"] == id_
else:
return lambda x: x["name"] == query
def resolve_team(mm_api: mattermost.MMApi, query: str) -> Optional[Dict]:
return first(
filter(
predicate_for_query(query),
mm_api.get_teams()
),
None
)
def resolve_channel(mm_api: mattermost.MMApi, team_id: str, query: str) -> Optional[Dict]:
public_channel_result = first(
filter(
predicate_for_query(query),
mm_api.get_team_channels(team_id)
),
None
)
if public_channel_result is not None:
return public_channel_result
joined_channel_result = first(
filter(
predicate_for_query(query),
mm_api.get_channels_for_user("me", team_id, params={"include_deleted":True})
),
None
)
return joined_channel_result
def resolve_team_channel(mm_api: mattermost.MMApi, query: str) -> Dict:
query_parts = query.split("/")
del query
if len(query_parts) != 2:
raise ValueError("Team/channel ID should be '<team>/<channel>'")
team = resolve_team(mm_api, query_parts[0])
if not team:
raise NotFound("team", query_parts[0])
if query_parts[1].startswith("id:"):
channel = mm_api.get_channel(query_parts[1][3:])
else:
channel = resolve_channel(mm_api, team["id"], query_parts[1])
if not channel:
raise NotFound("channel", query_parts[1])
return team, channel
def login(mm_api, cmdline_args):
print(
f"Logging in as {cmdline_args.login_id!r}; password provided: {yes_no(cmdline_args.password)}; "
f"TOTP token provided: {yes_no(cmdline_args.totp)}",
file=sys.stderr)
mm_api.login(cmdline_args.login_id, cmdline_args.password, cmdline_args.totp)
if cmdline_args.format == "json":
print(json.dumps({"token": mm_api._bearer}))
elif cmdline_args.format == "tsv":
print(mm_api._bearer)
else:
assert False
def cat(mm_api: mattermost.MMApi, cmdline_args):
# channels = [
# resolve_team_channel(mm_api, query)
# for query in cmdline_args.channels
# ]
team, channel = resolve_team_channel(mm_api, cmdline_args.channel)
if not cmdline_args.ids:
users = list(mm_api.get_users())
def attribute(key_value):
key, value = key_value
if key == "channel_id":
assert value == channel["id"]
return "channel", channel["name"]
if key == "user_id":
return "username", first(u["username"] for u in users if u["id"] == value)
return key_value
else:
def attribute(key_value):
return key_value
# In a list to allow overwriting from within print_initial_messages without using global
backlog = [ [] ]
backlog_lock = threading.Lock()
def print_initial_messages():
posts = get_posts_for_channel(mm_api, channel["id"], after=cmdline_args.after, since=cmdline_args.since)
for post in posts:
print(str_for_post(attribute, post, cmdline_args))
with backlog_lock:
for post in backlog[0]:
print(str_for_post(attribute, post, cmdline_args))
backlog[0] = None
if cmdline_args.follow:
def simple_websocket_callback(_mmws, event_data):
if event_data.get("event") == "posted":
post = json.loads(event_data["data"]["post"])
if post["channel_id"] != channel["id"]:
return
print(str_for_post(attribute, post, cmdline_args), flush=True)
def initial_websocket_callback(mmws: MMws, event_data):
if event_data.get("event") == "posted":
post = json.loads(event_data["data"]["post"])
if post["channel_id"] != channel["id"]:
return
with backlog_lock:
if backlog[0] is not None:
backlog[0].append(post)
return
else:
mmws.ws_handler = simple_websocket_callback
simple_websocket_callback(mmws, event_data)
ws_url = http_to_ws(mm_api._url) + "/v4/websocket"
mmws = MMws(initial_websocket_callback, mm_api.access_token, ws_url)
thread = threading.Thread(target=print_initial_messages)
thread.setDaemon(True)
thread.start()
mmws.run_websocket()
else:
print_initial_messages()
def tail(mm_api: mattermost.MMApi, cmdline_args):
team, channel = resolve_team_channel(mm_api, cmdline_args.channel)
if not cmdline_args.ids:
users = list(mm_api.get_users())
def attribute(key_value):
key, value = key_value
if key == "channel_id":
assert value == channel["id"]
return "channel", channel["name"]
if key == "user_id":
return "username", first(u["username"] for u in users if u["id"] == value)
return key_value
else:
def attribute(key_value):
return key_value
# In a list to allow overwriting from within print_initial_messages without using global
backlog = [ [] ]
backlog_lock = threading.Lock()
def print_initial_messages():
data_page = mm_api._get(f"/v4/channels/{channel['id']}/posts")
order = data_page["order"]
posts = [
data_page["posts"][post_id]
for post_id in reversed(order)
]
for post in posts:
print(str_for_post(attribute, post, cmdline_args))
with backlog_lock:
for post in backlog[0]:
print(str_for_post(attribute, post, cmdline_args))
backlog[0] = None
if cmdline_args.follow:
def simple_websocket_callback(_mmws, event_data):
if event_data.get("event") == "posted":
post = json.loads(event_data["data"]["post"])
if post["channel_id"] != channel["id"]:
return
print(str_for_post(attribute, post, cmdline_args), flush=True)
def initial_websocket_callback(mmws: MMws, event_data):
if event_data.get("event") == "posted":
post = json.loads(event_data["data"]["post"])
if post["channel_id"] != channel["id"]:
return
with backlog_lock:
if backlog[0] is not None:
backlog[0].append(post)
return
else:
mmws.ws_handler = simple_websocket_callback
simple_websocket_callback(mmws, event_data)
ws_url = http_to_ws(mm_api._url) + "/v4/websocket"
mmws = MMws(initial_websocket_callback, mm_api.access_token, ws_url)
thread = threading.Thread(target=print_initial_messages)
thread.setDaemon(True)
thread.start()
mmws.run_websocket()
else:
print_initial_messages()
def ls(mm_api: mattermost.MMApi, cmdline_args):
# TODO --follow doesn't work for channel creation and deletion yet
# In a list to allow overwriting from within print_initial_channels without using global
backlog = [ [] ]
backlog_lock = threading.Lock()
team = resolve_team(mm_api, cmdline_args.team)
events = {"channel_converted", "channel_created", "channel_deleted", "channel_updated"}
def print_initial_channels():
for channel in mm_api.get_team_channels(team["id"]):
print(str_for_chan(lambda x: x, channel, cmdline_args))
with backlog_lock:
for channel in backlog[0]:
print(str_for_chan(lambda x: x, channel, cmdline_args))
backlog[0] = None
if cmdline_args.follow:
def simple_websocket_callback(_mmws, event_data):
if event_data.get("event") in events:
channel = json.loads(event_data["data"]["channel"])
if channel["team_id"] != team["id"]:
return
print(str_for_chan(lambda x: x, channel, cmdline_args))
def initial_websocket_callback(mmws: MMws, event_data):
if event_data.get("event") in events:
channel = json.loads(event_data["data"]["channel"])
if channel["team_id"] != team["id"]:
return
with backlog_lock:
if backlog[0] is not None:
backlog[0].append(channel)
return
else:
mmws.ws_handler = simple_websocket_callback
simple_websocket_callback(mmws, event_data)
ws_url = http_to_ws(mm_api._url) + "/v4/websocket"
mmws = MMws(initial_websocket_callback, mm_api.access_token, ws_url)
thread = threading.Thread(target=print_initial_channels)
thread.setDaemon(True)
thread.start()
mmws.run_websocket()
else:
print_initial_channels()
def send(mm_api: mattermost.MMApi, cmdline_args):
read_stdin = cmdline_args.message is None or cmdline_args.channel is None
team, channel = resolve_team_channel(mm_api, cmdline_args.channel) if cmdline_args.channel is not None else (None, None)
if read_stdin:
warn_if_tty()
for line in sys.stdin:
msg = json.loads(line)
if "channel_id" in msg:
channel_id = msg["channel_id"]
elif "channel" in msg:
_, local_channel = resolve_team_channel(mm_api, msg["channel"])
channel_id = local_channel["id"]
elif channel is not None:
channel_id = channel["id"]
else:
print(f"Illegal message, missing channel: {line.strip()}", file=sys.stderr)
raise ValueError("Illegal message, missing channel")
sent = mm_api.create_post(channel_id, msg["message"], filepaths=msg.get("attachments"), root_id=msg.get("thread", msg.get("root_id")))
print(sent)
else:
sent = mm_api.create_post(channel["id"], cmdline_args.message, filepaths=cmdline_args.attach, root_id=cmdline_args.thread)
print(sent)
def rm(mm_api: mattermost.MMApi, cmdline_args):
mm_api.delete_post(cmdline_args.msgid)
def edit(mm_api: mattermost.MMApi, cmdline_args):
if cmdline_args.message is None:
warn_if_tty(you_can_type="the new message text")
new_text = sys.stdin.read()
else:
new_text = cmdline_args.message
mm_api.patch_post(cmdline_args.msgid, message=new_text)
def status(mm_api: mattermost.MMApi, cmdline_args):
if not cmdline_args.status:
raise ValueError("No status selected")
# This API endpoint requires the user ID to be passed explicitly in the request body,
# duplicating the info in the URL. But "me" does not suffice here.
my_user_id = mm_api.get_user()["id"]
mm_api._put(f"/v4/users/me/status", data={
"user_id": my_user_id,
"status": cmdline_args.status
})
def customstatus(mm_api: mattermost.MMApi, cmdline_args):
until = parse_datetime_to_utc(cmdline_args.until) if cmdline_args.until else None
if cmdline_args.text or cmdline_args.emoji:
mm_api._put(f"/v4/users/me/status/custom", data={
"emoji": cmdline_args.emoji,
"text": cmdline_args.text,
"expires_at": until.isoformat() if until else None
})
else:
mm_api._delete(f"/v4/users/me/status/custom")
def lastread(mm_api: mattermost.MMApi, cmdline_args):
team, channel = resolve_team_channel(mm_api, cmdline_args.channel) if cmdline_args.channel is not None else (None, None)
assert channel is not None
response = mm_api._get(f"/v4/users/me/channels/{channel['id']}/posts/unread?limit_after=1&limit_before=0")
if response["order"]:
assert len(response["order"]) == 1
last_read_id = response["order"][0]
else:
last_read_id = None
if cmdline_args.format == "json":
print(json.dumps(last_read_id))
if cmdline_args.format == "tsv":
print(last_read_id or "null")
def tsv_escape(text):
return text.replace("\\", "\\\\").replace("\t", r"\t").replace("\n", r"\n")
def str_for_post(attribute, post, cmdline_args):
obj = {
k: v
for k, v in map(attribute, post.items())
if (v or k == "message") and (k != "update_at" or post["update_at"] != post["create_at"])
}
if cmdline_args.format == "json":
return json.dumps(obj)
if cmdline_args.format == "tsv":
msg = tsv_escape(obj.get("message", ""))
return f"{obj['id']}\t{obj['create_at']}\t{obj.get('username') or obj['user_id']}\t{msg}"
assert False
def str_for_chan(attribute, channel, cmdline_args):
obj = {
k: v
for k, v in map(attribute, channel.items())
}
if cmdline_args.format == "json":
return json.dumps(obj)
if cmdline_args.format == "tsv":
# TODO
header = tsv_escape(obj.get("header", ""))
purpose = tsv_escape(obj.get("purpose", ""))
return f"{obj['id']}\t{obj['name']}\t{obj.get('display_name')}\t{obj.get('create_at')}\t{obj.get('delete_at')}\t{purpose}\t{header}"
assert False
ACTIONS = {
"login": {"function": login, "accesstoken_required": False},
"cat": {"function": cat},
"tail": {"function": tail},
"ls": {"function": ls},
"send": {"function": send},
"rm": {"function": rm},
"edit": {"function": edit},
"status": {"function": status},
"customstatus": {"function": customstatus},
"lastread": {"function": lastread},
}
FORMATTERS = { "json", "tsv" }
ENVVAR_SERVER = "MM_SERVER"
ENVVAR_USERNAME = "MM_USERNAME"
ENVVAR_PASSWORD = "MM_PASSWORD"
ENVVAR_TOTP = "MM_TOTP"
ENVVAR_ACCESSTOKEN = "MM_ACCESSTOKEN"
def main():
prog_name = os.path.basename(sys.argv[0])
description = "Interact with Mattermost on the CLI and in scripts"
epilog = f"""
For further help, use `{prog_name} <action> -h`.
Where a "URL name" is required, "id:" plus an ID can also be used instead. So these could both be valid:
town-square
id:123abc456def789ghi012jkl34
Hint: JSON output can be filtered with jq(1).
""".strip()
argparser = argparse.ArgumentParser(
prog_name, description=description, epilog=epilog,
formatter_class=argparse.RawTextHelpFormatter
)
argparser.add_argument("-i", "--ids", help="use IDs instead of names", action="store_true")
argparser.add_argument(
"--format", help="output format; only json has all fields; default: %(default)s", choices=FORMATTERS, default="json")
argparser.add_argument(
"--server",
help=f"e.g.: mattermost.example.org; example.org/mattermost; envvar: {ENVVAR_SERVER}",
default=os.getenv(ENVVAR_SERVER))
subparsers = argparser.add_subparsers(title="actions", dest="action", required=True)
password_argument_warning = f"""
Security note: Other programs and users can typically read which arguments you give to any program. Therefore it is strongly advised to use the environment variable (envvar) method when passing the credentials to the program. In many shells you can do so like this:
{ENVVAR_USERNAME}='aiden' {ENVVAR_PASSWORD}='2FifeVg2UGbCETYdaWscf7hmDvUHbp' {prog_name} login
""".strip()
parser_login = subparsers.add_parser(
"login", help="retrieve an access token", epilog=password_argument_warning, formatter_class=argparse.RawTextHelpFormatter)
parser_login.add_argument(
"login_id",
help=f"username or email; envvar: {ENVVAR_USERNAME}",
default=os.getenv(ENVVAR_USERNAME))
parser_login.add_argument(
"--password", help=f"see security note below; envvar: {ENVVAR_PASSWORD}", default=os.getenv(ENVVAR_PASSWORD))
parser_login.add_argument(
"--totp", help=f"see security note below; envvar: {ENVVAR_TOTP}", default=os.getenv(ENVVAR_TOTP))
# TODO support multiple channels
# parser_cat = subparsers.add_parser("cat", help="list messages in channel(s)")
# parser_cat.add_argument(
# "channels", nargs="+", help="URL names of team and channel: '<team>/<channel>'")
parser_cat = subparsers.add_parser("cat", help="list messages in channel")
parser_cat.add_argument("channel", help="URL names of team and channel: '<team>/<channel>'")
# ---
parser_cat.add_argument("--after", help="all after post with ID")
parser_cat.add_argument("--since", help="all after timestamp")
parser_cat.add_argument("-f", "--follow", action="store_true", help="keep running, printing new posts as they come in")
parser_tail = subparsers.add_parser("tail", help="list newest messages in channel")
parser_tail.add_argument("channel", help="URL names of team and channel: '<team>/<channel>'")
parser_tail.add_argument("-f", "--follow", action="store_true", help="keep running, printing new posts as they come in")
parser_ls = subparsers.add_parser("ls", help="list channels")
parser_ls.add_argument("team", help="URL name of team")
parser_ls.add_argument("-f", "--follow", action="store_true", help="keep running, printing changes to channels as they come in")
send_json_format = """
The input format accepted on stdin is one JSON object per line. The possible fields are 'message',
'channel' (URL names of team and channel: '<team>/<channel>'), 'channel_id'
""".strip()
parser_send = subparsers.add_parser(
"send", help="send message(s)",
epilog=send_json_format)
parser_send.add_argument(
"--channel", help="URL names of team and channel: '<team>/<channel>'; if not provided, "
"messages must be provided on stdin and each must specify channel")
parser_send.add_argument(
"--message", help="message; if not provided, messages will be expected on stdin")
parser_send.add_argument(
"--thread", help="message ID of the root message of the thread to post in")
parser_send.add_argument(
"--attach", nargs="+", help="filename of file to attach")
parser_rm = subparsers.add_parser("rm", help="delete message(s)")
parser_rm.add_argument("msgid", help="ID of message to delete")
parser_edit = subparsers.add_parser(
"edit", help="edit message(s)",
epilog="The input accepted on stdin will be used as-is as the new text.")
parser_edit.add_argument("msgid", help="ID of message to edit")
parser_edit.add_argument(
"--message", help="message; if not provided, message will be expected on stdin")
parser_status = subparsers.add_parser("status", help="update user status")
parser_status.add_argument("--online", dest="status", action="store_const", const="online", help="Set status to online")
parser_status.add_argument("--away", dest="status", action="store_const", const="away", help="Set status to away")
parser_status.add_argument("--dnd", dest="status", action="store_const", const="dnd", help="Set status to 'do not disturb'")
parser_status.add_argument("--offline", dest="status", action="store_const", const="offline", help="Set status to offline")
parser_customstatus = subparsers.add_parser("customstatus",
help="update custom user status (emoji and message)")
parser_customstatus.add_argument("--until", help="Datetime of when to clear the custom status")
parser_customstatus.add_argument("--emoji", help="Name of emoji (without colons), e.g. coffee")
parser_customstatus.add_argument("text" , help="Text for the status", nargs="?")
parser_lastread = subparsers.add_parser("lastread", help="last read message in channel; will be null if all messages are read")
parser_lastread.add_argument("channel", help="URL names of team and channel: '<team>/<channel>'")
parsed = argparser.parse_args()
if not parsed.server:
argparser.error(
f"server is required; use argument --server or environment variable {ENVVAR_SERVER}")
access_token = os.getenv(ENVVAR_ACCESSTOKEN)
if ACTIONS[parsed.action].get("accesstoken_required", True) and not access_token:
argparser.error(
f"`{prog_name} {parsed.action}` requires access token; get one with `{prog_name} login` "
f"and set environment variable {ENVVAR_ACCESSTOKEN}")
server = parsed.server if re.match(r"^[a-z]+://", parsed.server) else f"https://{parsed.server}"
mm_api = mattermost.MMApi(f"{server}/api")
mm_api.access_token = access_token
if access_token:
mm_api._headers.update({"Authorization": f"Bearer {access_token}"})
ACTIONS[parsed.action]["function"](mm_api, parsed)
if __name__ == "__main__":
main()

mmcli/parsedt.py
View file

@@ -1,18 +0,0 @@
"""
Wrapper around date/time parsing
"""
import datetime
from dateutil.parser import parse
def parse_datetime_to_utc(string) -> datetime.datetime:
parsed = parse(string)
if parsed.tzinfo is None:
# Convert to timezone aware datetime with the system's timezone
converted = parsed.astimezone()
else:
converted = parsed
return converted.astimezone(datetime.timezone.utc)
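# Example (illustrative):
#   parse_datetime_to_utc("2021-06-02 11:13+02:00")
#   -> datetime.datetime(2021, 6, 2, 9, 13, tzinfo=datetime.timezone.utc)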

pyproject.toml
View file

@@ -1,52 +0,0 @@
[project]
name = "mmcli"
version = "0.0.1"
authors = [
{ name="Midgard", email="midgard@zeus.ugent.be" },
]
description = "Mattermost command-line interface"
readme = "README.md"
# Choose from the list at https://pypi.org/classifiers/
classifiers = [
"Programming Language :: Python :: 3",
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
"Operating System :: OS Independent",
"Natural Language :: English",
"Environment :: Console",
#"Development Status :: 1 - Planning",
#"Development Status :: 2 - Pre-Alpha",
"Development Status :: 3 - Alpha",
#"Development Status :: 4 - Beta",
#"Development Status :: 5 - Production/Stable",
#"Development Status :: 6 - Mature",
#"Development Status :: 7 - Inactive",
"Intended Audience :: End Users/Desktop",
"Topic :: Utilities",
]
requires-python = ">=3.7"
dependencies = [
"python-dateutil >= 2.8.2, < 3.0.0",
"mattermost >= 5.33.0",
"websocket_client",
]
[project.urls]
#"Homepage" = ""
"Source" = "https://git.zeus.gent/midgard/mmcli"
"Change log" = "https://git.zeus.gent/midgard/mmcli/-/blob/master/CHANGELOG.md"
"Bug tracker" = "https://git.zeus.gent/midgard/mmcli/-/issues"
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
[project.scripts]
# executable-name = "package.subpackage.module:function"
mmcli = "mmcli.mmcli:main"
[tool.setuptools.packages]
find = {}

requirements.txt
View file

@@ -1,184 +0,0 @@
#
# This file is autogenerated. To update, run:
# tools/update_requirements.sh
#
certifi==2023.5.7 \
--hash=sha256:0f0d56dc5a6ad56fd4ba36484d6cc34451e1c6548c61daad8c320169f91eddc7 \
--hash=sha256:c6c2e98f5c7869efca1f8916fed228dd91539f9f1b444c314c06eef02980c716
# via requests
charset-normalizer==3.1.0 \
--hash=sha256:04afa6387e2b282cf78ff3dbce20f0cc071c12dc8f685bd40960cc68644cfea6 \
--hash=sha256:04eefcee095f58eaabe6dc3cc2262f3bcd776d2c67005880894f447b3f2cb9c1 \
--hash=sha256:0be65ccf618c1e7ac9b849c315cc2e8a8751d9cfdaa43027d4f6624bd587ab7e \
--hash=sha256:0c95f12b74681e9ae127728f7e5409cbbef9cd914d5896ef238cc779b8152373 \
--hash=sha256:0ca564606d2caafb0abe6d1b5311c2649e8071eb241b2d64e75a0d0065107e62 \
--hash=sha256:10c93628d7497c81686e8e5e557aafa78f230cd9e77dd0c40032ef90c18f2230 \
--hash=sha256:11d117e6c63e8f495412d37e7dc2e2fff09c34b2d09dbe2bee3c6229577818be \
--hash=sha256:11d3bcb7be35e7b1bba2c23beedac81ee893ac9871d0ba79effc7fc01167db6c \
--hash=sha256:12a2b561af122e3d94cdb97fe6fb2bb2b82cef0cdca131646fdb940a1eda04f0 \
--hash=sha256:12d1a39aa6b8c6f6248bb54550efcc1c38ce0d8096a146638fd4738e42284448 \
--hash=sha256:1435ae15108b1cb6fffbcea2af3d468683b7afed0169ad718451f8db5d1aff6f \
--hash=sha256:1c60b9c202d00052183c9be85e5eaf18a4ada0a47d188a83c8f5c5b23252f649 \
--hash=sha256:1e8fcdd8f672a1c4fc8d0bd3a2b576b152d2a349782d1eb0f6b8e52e9954731d \
--hash=sha256:20064ead0717cf9a73a6d1e779b23d149b53daf971169289ed2ed43a71e8d3b0 \
--hash=sha256:21fa558996782fc226b529fdd2ed7866c2c6ec91cee82735c98a197fae39f706 \
--hash=sha256:22908891a380d50738e1f978667536f6c6b526a2064156203d418f4856d6e86a \
--hash=sha256:3160a0fd9754aab7d47f95a6b63ab355388d890163eb03b2d2b87ab0a30cfa59 \
--hash=sha256:322102cdf1ab682ecc7d9b1c5eed4ec59657a65e1c146a0da342b78f4112db23 \
--hash=sha256:34e0a2f9c370eb95597aae63bf85eb5e96826d81e3dcf88b8886012906f509b5 \
--hash=sha256:3573d376454d956553c356df45bb824262c397c6e26ce43e8203c4c540ee0acb \
--hash=sha256:3747443b6a904001473370d7810aa19c3a180ccd52a7157aacc264a5ac79265e \
--hash=sha256:38e812a197bf8e71a59fe55b757a84c1f946d0ac114acafaafaf21667a7e169e \
--hash=sha256:3a06f32c9634a8705f4ca9946d667609f52cf130d5548881401f1eb2c39b1e2c \
--hash=sha256:3a5fc78f9e3f501a1614a98f7c54d3969f3ad9bba8ba3d9b438c3bc5d047dd28 \
--hash=sha256:3d9098b479e78c85080c98e1e35ff40b4a31d8953102bb0fd7d1b6f8a2111a3d \
--hash=sha256:3dc5b6a8ecfdc5748a7e429782598e4f17ef378e3e272eeb1340ea57c9109f41 \
--hash=sha256:4155b51ae05ed47199dc5b2a4e62abccb274cee6b01da5b895099b61b1982974 \
--hash=sha256:49919f8400b5e49e961f320c735388ee686a62327e773fa5b3ce6721f7e785ce \
--hash=sha256:53d0a3fa5f8af98a1e261de6a3943ca631c526635eb5817a87a59d9a57ebf48f \
--hash=sha256:5f008525e02908b20e04707a4f704cd286d94718f48bb33edddc7d7b584dddc1 \
--hash=sha256:628c985afb2c7d27a4800bfb609e03985aaecb42f955049957814e0491d4006d \
--hash=sha256:65ed923f84a6844de5fd29726b888e58c62820e0769b76565480e1fdc3d062f8 \
--hash=sha256:6734e606355834f13445b6adc38b53c0fd45f1a56a9ba06c2058f86893ae8017 \
--hash=sha256:6baf0baf0d5d265fa7944feb9f7451cc316bfe30e8df1a61b1bb08577c554f31 \
--hash=sha256:6f4f4668e1831850ebcc2fd0b1cd11721947b6dc7c00bf1c6bd3c929ae14f2c7 \
--hash=sha256:6f5c2e7bc8a4bf7c426599765b1bd33217ec84023033672c1e9a8b35eaeaaaf8 \
--hash=sha256:6f6c7a8a57e9405cad7485f4c9d3172ae486cfef1344b5ddd8e5239582d7355e \
--hash=sha256:7381c66e0561c5757ffe616af869b916c8b4e42b367ab29fedc98481d1e74e14 \
--hash=sha256:73dc03a6a7e30b7edc5b01b601e53e7fc924b04e1835e8e407c12c037e81adbd \
--hash=sha256:74db0052d985cf37fa111828d0dd230776ac99c740e1a758ad99094be4f1803d \
--hash=sha256:75f2568b4189dda1c567339b48cba4ac7384accb9c2a7ed655cd86b04055c795 \
--hash=sha256:78cacd03e79d009d95635e7d6ff12c21eb89b894c354bd2b2ed0b4763373693b \
--hash=sha256:80d1543d58bd3d6c271b66abf454d437a438dff01c3e62fdbcd68f2a11310d4b \
--hash=sha256:830d2948a5ec37c386d3170c483063798d7879037492540f10a475e3fd6f244b \
--hash=sha256:891cf9b48776b5c61c700b55a598621fdb7b1e301a550365571e9624f270c203 \
--hash=sha256:8f25e17ab3039b05f762b0a55ae0b3632b2e073d9c8fc88e89aca31a6198e88f \
--hash=sha256:9a3267620866c9d17b959a84dd0bd2d45719b817245e49371ead79ed4f710d19 \
--hash=sha256:a04f86f41a8916fe45ac5024ec477f41f886b3c435da2d4e3d2709b22ab02af1 \
--hash=sha256:aaf53a6cebad0eae578f062c7d462155eada9c172bd8c4d250b8c1d8eb7f916a \
--hash=sha256:abc1185d79f47c0a7aaf7e2412a0eb2c03b724581139193d2d82b3ad8cbb00ac \
--hash=sha256:ac0aa6cd53ab9a31d397f8303f92c42f534693528fafbdb997c82bae6e477ad9 \
--hash=sha256:ac3775e3311661d4adace3697a52ac0bab17edd166087d493b52d4f4f553f9f0 \
--hash=sha256:b06f0d3bf045158d2fb8837c5785fe9ff9b8c93358be64461a1089f5da983137 \
--hash=sha256:b116502087ce8a6b7a5f1814568ccbd0e9f6cfd99948aa59b0e241dc57cf739f \
--hash=sha256:b82fab78e0b1329e183a65260581de4375f619167478dddab510c6c6fb04d9b6 \
--hash=sha256:bd7163182133c0c7701b25e604cf1611c0d87712e56e88e7ee5d72deab3e76b5 \
--hash=sha256:c36bcbc0d5174a80d6cccf43a0ecaca44e81d25be4b7f90f0ed7bcfbb5a00909 \
--hash=sha256:c3af8e0f07399d3176b179f2e2634c3ce9c1301379a6b8c9c9aeecd481da494f \
--hash=sha256:c84132a54c750fda57729d1e2599bb598f5fa0344085dbde5003ba429a4798c0 \
--hash=sha256:cb7b2ab0188829593b9de646545175547a70d9a6e2b63bf2cd87a0a391599324 \
--hash=sha256:cca4def576f47a09a943666b8f829606bcb17e2bc2d5911a46c8f8da45f56755 \
--hash=sha256:cf6511efa4801b9b38dc5546d7547d5b5c6ef4b081c60b23e4d941d0eba9cbeb \
--hash=sha256:d16fd5252f883eb074ca55cb622bc0bee49b979ae4e8639fff6ca3ff44f9f854 \
--hash=sha256:d2686f91611f9e17f4548dbf050e75b079bbc2a82be565832bc8ea9047b61c8c \
--hash=sha256:d7fc3fca01da18fbabe4625d64bb612b533533ed10045a2ac3dd194bfa656b60 \
--hash=sha256:dd5653e67b149503c68c4018bf07e42eeed6b4e956b24c00ccdf93ac79cdff84 \
--hash=sha256:de5695a6f1d8340b12a5d6d4484290ee74d61e467c39ff03b39e30df62cf83a0 \
--hash=sha256:e0ac8959c929593fee38da1c2b64ee9778733cdf03c482c9ff1d508b6b593b2b \
--hash=sha256:e1b25e3ad6c909f398df8921780d6a3d120d8c09466720226fc621605b6f92b1 \
--hash=sha256:e633940f28c1e913615fd624fcdd72fdba807bf53ea6925d6a588e84e1151531 \
--hash=sha256:e89df2958e5159b811af9ff0f92614dabf4ff617c03a4c1c6ff53bf1c399e0e1 \
--hash=sha256:ea9f9c6034ea2d93d9147818f17c2a0860d41b71c38b9ce4d55f21b6f9165a11 \
--hash=sha256:f645caaf0008bacf349875a974220f1f1da349c5dbe7c4ec93048cdc785a3326 \
--hash=sha256:f8303414c7b03f794347ad062c0516cee0e15f7a612abd0ce1e25caf6ceb47df \
--hash=sha256:fca62a8301b605b954ad2e9c3666f9d97f63872aa4efcae5492baca2056b74ab
# via requests
idna==3.4 \
--hash=sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4 \
--hash=sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2
# via requests
mattermost==6.5.0 \
--hash=sha256:738270df4deb987d93c3778aa4b58bd037e8ee891a755d1faded3b8101e3cb8c
# via mmcli (pyproject.toml)
python-dateutil==2.8.2 \
--hash=sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86 \
--hash=sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9
# via mmcli (pyproject.toml)
requests==2.30.0 \
--hash=sha256:10e94cc4f3121ee6da529d358cdaeaff2f1c409cd377dbc72b825852f2f7e294 \
--hash=sha256:239d7d4458afcb28a692cdd298d87542235f4ca8d36d03a15bfc128a6559a2f4
# via mattermost
six==1.16.0 \
--hash=sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926 \
--hash=sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254
# via python-dateutil
urllib3==2.0.2 \
--hash=sha256:61717a1095d7e155cdb737ac7bb2f4324a858a1e2e6466f6d03ff630ca68d3cc \
--hash=sha256:d055c2f9d38dc53c808f6fdc8eab7360b6fdbbde02340ed25cfbcd817c62469e
# via requests
websocket-client==1.5.1 \
--hash=sha256:3f09e6d8230892547132177f575a4e3e73cfdf06526e20cc02aa1c3b47184d40 \
--hash=sha256:cdf5877568b7e83aa7cf2244ab56a3213de587bbe0ce9d8b9600fc77b455d89e
# via mmcli (pyproject.toml)
websockets==11.0.3 \
--hash=sha256:01f5567d9cf6f502d655151645d4e8b72b453413d3819d2b6f1185abc23e82dd \
--hash=sha256:03aae4edc0b1c68498f41a6772d80ac7c1e33c06c6ffa2ac1c27a07653e79d6f \
--hash=sha256:0ac56b661e60edd453585f4bd68eb6a29ae25b5184fd5ba51e97652580458998 \
--hash=sha256:0ee68fe502f9031f19d495dae2c268830df2760c0524cbac5d759921ba8c8e82 \
--hash=sha256:1553cb82942b2a74dd9b15a018dce645d4e68674de2ca31ff13ebc2d9f283788 \
--hash=sha256:1a073fc9ab1c8aff37c99f11f1641e16da517770e31a37265d2755282a5d28aa \
--hash=sha256:1d2256283fa4b7f4c7d7d3e84dc2ece74d341bce57d5b9bf385df109c2a1a82f \
--hash=sha256:1d5023a4b6a5b183dc838808087033ec5df77580485fc533e7dab2567851b0a4 \
--hash=sha256:1fdf26fa8a6a592f8f9235285b8affa72748dc12e964a5518c6c5e8f916716f7 \
--hash=sha256:2529338a6ff0eb0b50c7be33dc3d0e456381157a31eefc561771ee431134a97f \
--hash=sha256:279e5de4671e79a9ac877427f4ac4ce93751b8823f276b681d04b2156713b9dd \
--hash=sha256:2d903ad4419f5b472de90cd2d40384573b25da71e33519a67797de17ef849b69 \
--hash=sha256:332d126167ddddec94597c2365537baf9ff62dfcc9db4266f263d455f2f031cb \
--hash=sha256:34fd59a4ac42dff6d4681d8843217137f6bc85ed29722f2f7222bd619d15e95b \
--hash=sha256:3580dd9c1ad0701169e4d6fc41e878ffe05e6bdcaf3c412f9d559389d0c9e016 \
--hash=sha256:3ccc8a0c387629aec40f2fc9fdcb4b9d5431954f934da3eaf16cdc94f67dbfac \
--hash=sha256:41f696ba95cd92dc047e46b41b26dd24518384749ed0d99bea0a941ca87404c4 \
--hash=sha256:42cc5452a54a8e46a032521d7365da775823e21bfba2895fb7b77633cce031bb \
--hash=sha256:4841ed00f1026dfbced6fca7d963c4e7043aa832648671b5138008dc5a8f6d99 \
--hash=sha256:4b253869ea05a5a073ebfdcb5cb3b0266a57c3764cf6fe114e4cd90f4bfa5f5e \
--hash=sha256:54c6e5b3d3a8936a4ab6870d46bdd6ec500ad62bde9e44462c32d18f1e9a8e54 \
--hash=sha256:619d9f06372b3a42bc29d0cd0354c9bb9fb39c2cbc1a9c5025b4538738dbffaf \
--hash=sha256:6505c1b31274723ccaf5f515c1824a4ad2f0d191cec942666b3d0f3aa4cb4007 \
--hash=sha256:660e2d9068d2bedc0912af508f30bbeb505bbbf9774d98def45f68278cea20d3 \
--hash=sha256:6681ba9e7f8f3b19440921e99efbb40fc89f26cd71bf539e45d8c8a25c976dc6 \
--hash=sha256:68b977f21ce443d6d378dbd5ca38621755f2063d6fdb3335bda981d552cfff86 \
--hash=sha256:69269f3a0b472e91125b503d3c0b3566bda26da0a3261c49f0027eb6075086d1 \
--hash=sha256:6f1a3f10f836fab6ca6efa97bb952300b20ae56b409414ca85bff2ad241d2a61 \
--hash=sha256:7622a89d696fc87af8e8d280d9b421db5133ef5b29d3f7a1ce9f1a7bf7fcfa11 \
--hash=sha256:777354ee16f02f643a4c7f2b3eff8027a33c9861edc691a2003531f5da4f6bc8 \
--hash=sha256:84d27a4832cc1a0ee07cdcf2b0629a8a72db73f4cf6de6f0904f6661227f256f \
--hash=sha256:8531fdcad636d82c517b26a448dcfe62f720e1922b33c81ce695d0edb91eb931 \
--hash=sha256:86d2a77fd490ae3ff6fae1c6ceaecad063d3cc2320b44377efdde79880e11526 \
--hash=sha256:88fc51d9a26b10fc331be344f1781224a375b78488fc343620184e95a4b27016 \
--hash=sha256:8a34e13a62a59c871064dfd8ffb150867e54291e46d4a7cf11d02c94a5275bae \
--hash=sha256:8c82f11964f010053e13daafdc7154ce7385ecc538989a354ccc7067fd7028fd \
--hash=sha256:92b2065d642bf8c0a82d59e59053dd2fdde64d4ed44efe4870fa816c1232647b \
--hash=sha256:97b52894d948d2f6ea480171a27122d77af14ced35f62e5c892ca2fae9344311 \
--hash=sha256:9d9acd80072abcc98bd2c86c3c9cd4ac2347b5a5a0cae7ed5c0ee5675f86d9af \
--hash=sha256:9f59a3c656fef341a99e3d63189852be7084c0e54b75734cde571182c087b152 \
--hash=sha256:aa5003845cdd21ac0dc6c9bf661c5beddd01116f6eb9eb3c8e272353d45b3288 \
--hash=sha256:b16fff62b45eccb9c7abb18e60e7e446998093cdcb50fed33134b9b6878836de \
--hash=sha256:b30c6590146e53149f04e85a6e4fcae068df4289e31e4aee1fdf56a0dead8f97 \
--hash=sha256:b58cbf0697721120866820b89f93659abc31c1e876bf20d0b3d03cef14faf84d \
--hash=sha256:b67c6f5e5a401fc56394f191f00f9b3811fe843ee93f4a70df3c389d1adf857d \
--hash=sha256:bceab846bac555aff6427d060f2fcfff71042dba6f5fca7dc4f75cac815e57ca \
--hash=sha256:bee9fcb41db2a23bed96c6b6ead6489702c12334ea20a297aa095ce6d31370d0 \
--hash=sha256:c114e8da9b475739dde229fd3bc6b05a6537a88a578358bc8eb29b4030fac9c9 \
--hash=sha256:c1f0524f203e3bd35149f12157438f406eff2e4fb30f71221c8a5eceb3617b6b \
--hash=sha256:c792ea4eabc0159535608fc5658a74d1a81020eb35195dd63214dcf07556f67e \
--hash=sha256:c7f3cb904cce8e1be667c7e6fef4516b98d1a6a0635a58a57528d577ac18a128 \
--hash=sha256:d67ac60a307f760c6e65dad586f556dde58e683fab03323221a4e530ead6f74d \
--hash=sha256:dcacf2c7a6c3a84e720d1bb2b543c675bf6c40e460300b628bab1b1efc7c034c \
--hash=sha256:de36fe9c02995c7e6ae6efe2e205816f5f00c22fd1fbf343d4d18c3d5ceac2f5 \
--hash=sha256:def07915168ac8f7853812cc593c71185a16216e9e4fa886358a17ed0fd9fcf6 \
--hash=sha256:df41b9bc27c2c25b486bae7cf42fccdc52ff181c8c387bfd026624a491c2671b \
--hash=sha256:e052b8467dd07d4943936009f46ae5ce7b908ddcac3fda581656b1b19c083d9b \
--hash=sha256:e063b1865974611313a3849d43f2c3f5368093691349cf3c7c8f8f75ad7cb280 \
--hash=sha256:e1459677e5d12be8bbc7584c35b992eea142911a6236a3278b9b5ce3326f282c \
--hash=sha256:e1a99a7a71631f0efe727c10edfba09ea6bee4166a6f9c19aafb6c0b5917d09c \
--hash=sha256:e590228200fcfc7e9109509e4d9125eace2042fd52b595dd22bbc34bb282307f \
--hash=sha256:e6316827e3e79b7b8e7d8e3b08f4e331af91a48e794d5d8b099928b6f0b85f20 \
--hash=sha256:e7837cb169eca3b3ae94cc5787c4fed99eef74c0ab9506756eea335e0d6f3ed8 \
--hash=sha256:e848f46a58b9fcf3d06061d17be388caf70ea5b8cc3466251963c8345e13f7eb \
--hash=sha256:ed058398f55163a79bb9f06a90ef9ccc063b204bb346c4de78efc5d15abfe602 \
--hash=sha256:f2e58f2c36cc52d41f2659e4c0cbf7353e28c8c9e63e30d8c6d3494dc9fdedcf \
--hash=sha256:f467ba0050b7de85016b43f5a22b46383ef004c4f672148a8abf32bc999a87f0 \
--hash=sha256:f61bdb1df43dc9c131791fbc2355535f9024b9a04398d3bd0684fc16ab07df74 \
--hash=sha256:fb06eea71a00a7af0ae6aefbb932fb8a7df3cb390cc217d51a9ad7343de1b8d0 \
--hash=sha256:ffd7dcaf744f25f82190856bc26ed81721508fc5cbf2a330751e135ff1283564
# via mattermost

View file

@@ -1,4 +0,0 @@
#!/bin/sh
cd "`dirname "$0"`"/..
rm -rf ./build/ ./*.egg-info/ ./dist/ ./__pycache__/ ./*/__pycache__/

tools/create_venv.sh
View file

@@ -1,8 +0,0 @@
#!/bin/sh
cd "`dirname "$0"`"/..
# Create virtualenv
python3 -m virtualenv venv/
# Install dependencies
venv/bin/pip install -e .

tools/release.sh
View file

@@ -1,43 +0,0 @@
#!/bin/bash
set -e
cd $(dirname "$0")/..
tools/test.sh
if [ ! -t 0 ] ; then
echo "release.sh should be run with a terminal attached to stdin" >&2
exit 1
fi
git status
echo -n "Previous version: v"
prev_version="$(python -c 'print(__import__("tomllib").load(open("pyproject.toml", "rb"))["project"]["version"])')"
echo "$prev_version"
read -p "Enter new version: v" version
tagid=v"$version"
if [ "$version" != "$prev_version" ]; then
sed -i 's/version = ".*"/version = "'"$version"'"/' pyproject.toml
sed -i 's/## \[Unreleased\]/&\n### Added\n### Changed\n### Deprecated\n### Removed\n### Fixed\n### Security\n\n## ['"$version"'] - '"$(date --utc +%Y-%m-%d)"'/' CHANGELOG.md
echo; echo "Inspect CHANGELOG..."
${EDITOR:-nano} CHANGELOG.md
git add pyproject.toml CHANGELOG.md
git commit -m "Bump version to $version"
echo "Creating git tag $tagid"
git tag -s -m "Version $version" "$tagid"
else
echo "Version already created; building wheel and uploading"
fi
venv/bin/pip install --upgrade build
venv/bin/python -m build
read -p "Upload to Git and PyPI? (y/N) " confirm
if [ ! "$confirm" = y ]; then "Abort"; exit 1; fi
python3 -m twine upload dist/*-${version}*
git push origin "$tagid" master

tools/update_requirements.sh
View file

@@ -1,16 +0,0 @@
#!/bin/sh
cd "`dirname "$0"`"/..
if [ ! -f venv/bin/pip-compile ]; then
venv/bin/pip install pip-tools
fi
cat <<EOF > requirements.txt
#
# This file is autogenerated. To update, run:
# tools/update_requirements.sh
#
EOF
venv/bin/pip-compile --quiet --generate-hashes --annotate --no-header --output-file="-" >> requirements.txt
echo "Updated requirements.txt"