diff --git a/mmcli/mmcli.py b/mmcli/mmcli.py
index bd5c89e..edb30a0 100755
--- a/mmcli/mmcli.py
+++ b/mmcli/mmcli.py
@@ -4,6 +4,7 @@ import sys
 import argparse
 import os
 import json
+import itertools
 from typing import Dict, Optional, List, Iterable, Tuple
 import re
 from time import sleep
@@ -50,7 +51,7 @@ def get_user_id(mm_api: mattermost.MMApi):
     return mm_api._my_user_id
 
 
-def get_posts_for_channel(self, channel_id: str, progress=lambda x: None, after=None, **kwargs) -> Iterable[Dict]:
+def get_posts_for_channel(self, channel_id: str, progress=lambda x: None, after=None, maximum=None, **kwargs) -> Iterable[Dict]:
     """
     @raises ApiException: Passed on from lower layers.
     """
@@ -59,6 +60,9 @@ def get_posts_for_channel(self, channel_id: str, progress=lambda x: None, after=
     total = 0
 
     if after:
+        if maximum is not None:
+            raise ValueError("after and maximum cannot be used together")
+
         # Posts in channel after a given ID: API gives pages with OLDEST messages first, so we can
         # yield each page when it is fetched
         while True:
@@ -86,14 +90,20 @@ def get_posts_for_channel(self, channel_id: str, progress=lambda x: None, after=
             data_page = self._get(f"/v4/channels/{channel_id}/posts", params={"page": page, "per_page": per_page, **kwargs})
             order = data_page["order"]
 
-            posts.extend(
-                data_page["posts"][post_id]
-                for post_id in order
-                if post_id not in post_ids
-            )
+            this_maximum = None if maximum is None else maximum - len(posts)
+
+            posts.extend(itertools.islice(
+                (
+                    data_page["posts"][post_id]
+                    for post_id in order
+                    if post_id not in post_ids
+                ),
+                this_maximum
+            ))
             post_ids |= set(order)
             progress(len(posts))
-            if len(order) < per_page:
+            # maximum may be None (no limit) for pre-existing callers; guard the comparison.
+            if len(order) < per_page or (maximum is not None and len(posts) >= maximum):
                 break
             page += 1
             sleep(0.1)
@@ -254,6 +264,8 @@ def cat(mm_api: mattermost.MMApi, cmdline_args):
 
 
 def tail(mm_api: mattermost.MMApi, cmdline_args):
+    desired_count = cmdline_args.number
+
     team, channel = resolve_team_channel(mm_api, cmdline_args.channel)
 
     if not cmdline_args.ids:
@@ -276,12 +288,7 @@ def tail(mm_api: mattermost.MMApi, cmdline_args):
     backlog_lock = threading.Lock()
 
     def print_initial_messages():
-        data_page = mm_api._get(f"/v4/channels/{channel['id']}/posts")
-        order = data_page["order"]
-        posts = [
-            data_page["posts"][post_id]
-            for post_id in reversed(order)
-        ]
+        posts = get_posts_for_channel(mm_api, channel["id"], maximum=desired_count)
 
         for post in posts:
             print(str_for_post(attribute, post, cmdline_args))
@@ -616,6 +623,7 @@ Security note: Other programs and users can typically read which arguments you g
     parser_tail = subparsers.add_parser("tail", help="list newest messages in channel")
     parser_tail.add_argument("channel", help="URL names of team and channel: '/'")
     parser_tail.add_argument("-f", "--follow", action="store_true", help="keep running, printing new posts as they come in")
+    parser_tail.add_argument("-n", "--number", type=int, default=200, help="maximum amount of messages of backlog to fetch (when using with -f, new posts will be printed without limit)")
 
     parser_ls = subparsers.add_parser("ls", help="list channels")
     parser_ls.add_argument("team", help="URL name of team")