+5
-14
.dockerignore
+2
-4
.gitignore
+50
.tangled/workflows/build-images.yml
···
1
+
when:
2
+
- event: ["push", "manual"]
3
+
branch: master
4
+
5
+
engine: nixery
6
+
7
+
dependencies:
8
+
nixpkgs:
9
+
- kaniko
10
+
- regctl
11
+
12
+
environment:
13
+
GHCR_USER: "zenfyrdev"
14
+
15
+
steps:
16
+
- name: create auth configs
17
+
command: |
18
+
mkdir -p $HOME/.docker $HOME/.regctl
19
+
20
+
cat > $HOME/.docker/config.json <<EOF
21
+
{"auths": {"ghcr.io": {"auth": "$(echo -n "$GHCR_USER:$GHCR_PAT" | base64 -w0)"}}}
22
+
EOF
23
+
24
+
cat > $HOME/.regctl/config.json <<EOF
25
+
{"hosts": {"ghcr.io": {"user": "$GHCR_USER","pass": "$GHCR_PAT"}}}
26
+
EOF
27
+
28
+
- name: build amd64
29
+
command: |
30
+
executor \
31
+
--context=dir://. \
32
+
--dockerfile=Containerfile \
33
+
--verbosity=info \
34
+
--destination=ghcr.io/$GHCR_USER/xpost:amd64-latest \
35
+
--custom-platform=linux/amd64
36
+
37
+
- name: build arm64
38
+
command: |
39
+
executor \
40
+
--context=dir://. \
41
+
--dockerfile=Containerfile \
42
+
--verbosity=info \
43
+
--destination=ghcr.io/$GHCR_USER/xpost:arm64-latest \
44
+
--custom-platform=linux/arm64
45
+
46
+
- name: tag latest artifact
47
+
command: |
48
+
regctl index create ghcr.io/$GHCR_USER/xpost:latest \
49
+
--ref ghcr.io/$GHCR_USER/xpost:amd64-latest --platform linux/amd64 \
50
+
--ref ghcr.io/$GHCR_USER/xpost:arm64-latest --platform linux/arm64
-15
.tangled/workflows/run-tests.yml
+21
LICENSE
···
1
+
MIT License
2
+
3
+
Copyright (c) 2025
4
+
5
+
Permission is hereby granted, free of charge, to any person obtaining a copy
6
+
of this software and associated documentation files (the "Software"), to deal
7
+
in the Software without restriction, including without limitation the rights
8
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+
copies of the Software, and to permit persons to whom the Software is
10
+
furnished to do so, subject to the following conditions:
11
+
12
+
The above copyright notice and this permission notice shall be included in all
13
+
copies or substantial portions of the Software.
14
+
15
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+
SOFTWARE.
+171
-7
README.md
···
1
-
# xpost next
1
+
# XPost
2
+
3
+
XPost is a social media cross-posting tool that differs from others by using streaming APIs to allow instant, zero-input cross-posting. this means you can continue posting on your preferred platform without using special apps.
4
+
5
+
XPost tries to support as many features as possible. for example, when cross-posting from mastodon to bluesky, unsupported file types will be attached as links. posts with mixed media or too many files will be split and spread across text.
6
+
7
+
the tool may undergo breaking changes as new features are added, so proceed with caution when deploying.
8
+
9
+
# Installation
10
+
11
+
## Native
12
+
13
+
first install `ffmpeg`, `ffprobe` and `libmagic`, and make sure `ffmpeg` is available on PATH! `ffmpeg` and `libmagic` are required to crosspost media.
14
+
15
+
then get [uv](https://github.com/astral-sh/uv) and sync the project
16
+
17
+
```
18
+
uv sync
19
+
```
20
+
21
+
generate `settings.json` on first launch
22
+
23
+
```
24
+
uv run main.py
25
+
```
26
+
27
+
## Docker Compose
28
+
29
+
the official image is available on [docker hub](https://hub.docker.com/r/melontini/xpost). example `compose.yaml` below. this assumes the data dir is `./data` and the env file is `./.config/docker.env`. add `:Z` to volume mounts for podman.
30
+
31
+
```yaml
32
+
services:
33
+
xpost:
34
+
image: melontini/xpost:latest
35
+
restart: unless-stopped
36
+
env_file: ./.config/docker.env
37
+
volumes:
38
+
- ./data:/app/data
39
+
```
40
+
41
+
# Settings
42
+
43
+
the tool allows you to specify an input and multiple outputs to post to.
44
+
45
+
some options accept an envvar syntax:
2
46
3
-
> [!NOTE]
4
-
> this is the dev branch for xpost next, a full rewrite of xpost. the older version is available on the master branch.
5
-
>
6
-
> planned work for this branch can be found and tracked here: https://tangled.org/@zenfyr.dev/xpost/issues/1
47
+
```json
48
+
{
49
+
"token": "env:TOKEN"
50
+
}
51
+
```
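
at startup, values prefixed with `env:` are replaced with the value of the named environment variable. a minimal sketch of the resolution, assuming a helper along the lines of `as_envvar` from `util/util.py` (the exact implementation may differ):

```python
import os

def as_envvar(value: str | None) -> str | None:
    # "env:TOKEN" -> os.environ["TOKEN"]; anything else passes through
    if value and value.startswith("env:"):
        return os.environ.get(value[len("env:"):])
    return value
```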
7
52
8
-
xpost is a social media cross-posting tool that differs from others by using streaming APIs to allow instant, zero-input cross-posting. this means you can continue posting on your preferred platform without using special apps.
53
+
## Inputs
9
54
10
-
xpost tries to support as many features as possible. for example, when cross-posting from mastodon to bluesky, unsupported file types will be attached as links. posts with mixed media or too many files will be split and spread across text.
55
+
all inputs have common options.
56
+
57
+
```json5
58
+
{
59
+
"options": {
60
+
"regex_filters": [ //posts matching any of the following regexes will be skipped
61
+
"(?i)\\b(?:test|hello|hi)\\b"
62
+
]
63
+
}
64
+
}
65
+
```
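
a rough sketch of how the filter check behaves (the real helper, `cross.test_filters`, operates on the tokenized post rather than raw text):

```python
import re

def test_filters(text: str, filters: list[re.Pattern[str]]) -> bool:
    # returns False when any filter matches, i.e. the post should be skipped
    return not any(f.search(text) for f in filters)
```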
66
+
67
+
### Bluesky Jetstream
68
+
69
+
listens to repo operation events emitted by Jetstream. handle becomes optional if you specify a DID.
70
+
71
+
```json5
72
+
{
73
+
"type": "bluesky-jetstream-wss",
74
+
"handle": "env:BLUESKY_HANDLE", // handle (e.g. melontini.me)
75
+
"did": "env:BLUESKY_DID", // use a DID instead of handle (avoids handle resolution)
76
+
"jetstream": "wss://jetstream2.us-east.bsky.network/subscribe" //optional, change jetstream endpoint
77
+
}
78
+
```
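
under the hood the input opens a websocket to Jetstream, filtered to the configured DID and the post/repost collections. a trimmed sketch of the subscription loop (mirroring `bluesky/input.py`):

```python
import json

import websockets

async def listen(jetstream: str, did: str):
    uri = (
        f"{jetstream}?wantedCollections=app.bsky.feed.post"
        f"&wantedCollections=app.bsky.feed.repost&wantedDids={did}"
    )
    async for ws in websockets.connect(uri):  # reconnects on connection loss
        try:
            async for msg in ws:
                commit = json.loads(msg).get("commit", {})
                print(commit.get("operation"), commit.get("collection"))
        except websockets.ConnectionClosedError:
            continue
```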
79
+
80
+
### Mastodon WebSocket `mastodon-wss`
81
+
82
+
listens to the user's home timeline for new posts and crossposts only the public/unlisted ones made by the user.
83
+
84
+
```json5
85
+
{
86
+
"type": "mastodon-wss", // type
87
+
"instance": "env:MASTODON_INSTANCE", // mastodon api compatible instance
88
+
"token": "env:MASTODON_TOKEN", // Must be a mastodon token. get from something like phanpy + webtools. or https://getauth.thms.uk/?client_name=xpost&scopes=read:statuses%20write:statuses%20profile but doesn't work with all software
89
+
"options": {
90
+
"allowed_visibility": [
91
+
"public",
92
+
"unlisted"
93
+
]
94
+
}
95
+
}
96
+
```
97
+
98
+
any instance implementing `/api/v1/instance`, `/api/v1/accounts/verify_credentials` and `/api/v1/streaming?stream` will work fine.
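
for reference, the user stream connection looks roughly like this (a sketch of the mastodon streaming API; the exact query parameters may vary between implementations):

```python
import json

import websockets

async def stream_user(instance: str, token: str):
    url = f"wss://{instance}/api/v1/streaming?stream=user&access_token={token}"
    async with websockets.connect(url) as ws:
        async for msg in ws:
            event = json.loads(msg)
            if event.get("event") == "update":  # new status
                status = json.loads(event["payload"])  # payload is a JSON string
                print(status["visibility"], status["url"])
```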
99
+
100
+
confirmed supported:
101
+
- Mastodon
102
+
- Iceshrimp.NET
103
+
- Akkoma
104
+
105
+
confirmed unsupported:
106
+
- Mitra
107
+
- Sharkey
108
+
109
+
### Misskey WebSocket
110
+
111
+
listens to the homeTimeline channel for new posts and crossposts only the public/home ones made by the user.
112
+
113
+
**IMPORTANT**: Misskey WSS does not support deletes, so you must delete posts manually. if you know how i can listen to all note events, i would appreciate your help.
114
+
115
+
```json5
116
+
{
117
+
"type": "misskey-wss", // type
118
+
"instance": "env:MISSKEY_INSTANCE", // misskey instance
119
+
"token": "env:MISSKEY_TOKEN", // access token with the `View your account information` scope
120
+
"options": {
121
+
"allowed_visibility": [
122
+
"public",
123
+
"home"
124
+
]
125
+
}
126
+
}
127
+
```
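
the channel subscription itself looks roughly like this (a sketch of the misskey streaming API; the real input also checks the authenticated user and visibility):

```python
import json

import websockets

async def listen_home(instance: str, token: str):
    async with websockets.connect(f"wss://{instance}/streaming?i={token}") as ws:
        # subscribe to the homeTimeline channel
        await ws.send(json.dumps(
            {"type": "connect", "body": {"channel": "homeTimeline", "id": "xpost"}}
        ))
        async for msg in ws:
            event = json.loads(msg)
            if event.get("type") == "channel":
                note = event["body"]["body"]
                print(note.get("visibility"), note.get("text"))
```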
128
+
129
+
the Misskey API is fairly limited, and this also wasn't tested on vanilla misskey.
130
+
131
+
confirmed supported:
132
+
- Sharkey
133
+
134
+
## Outputs
135
+
136
+
### Mastodon API
137
+
138
+
no remarks.
139
+
140
+
```json5
141
+
{
142
+
"type": "mastodon",
143
+
"token": "env:MASTODON_TOKEN", // Must be a mastodon token. get from something like phanpy + webtools. or https://getauth.thms.uk/?client_name=xpost&scopes=read%20write%20profile but doesn't work with all software
144
+
"instance": "env:MASTODON_INSTNACE", // mastodon api compatible instance
145
+
"options": {
146
+
"visibility": "public"
147
+
}
148
+
}
149
+
```
150
+
151
+
### Bluesky
152
+
153
+
in the bluesky block, you can configure who is allowed to reply to and quote the new posts. handle becomes optional if you specify a DID.
154
+
155
+
```json5
156
+
{
157
+
"type": "bluesky", // type
158
+
"handle": "env:BLUESKY_HANDLE", // handle (e.g. melontini.me)
159
+
"app_password": "env:BLUESKY_APP_PASSWORD", // https://bsky.app/settings/app-passwords
160
+
"did": "env:BLUESKY_DID", // use a DID instead of handle (avoids handle resolution)
161
+
"pds": "env:BLUESKY_PDS", // specify Your PDS directly (avoids DID doc lookup)
162
+
"bsky_appview": "env:BLUESKY_APPVIEW", // bypass suspensions by specifying a different appview (e.g. did:web:bsky.zeppelin.social)
163
+
"options": {
164
+
"encode_videos": true, // bluesky only accepts mp4 videos, try to convert if the video is not mp4
165
+
"quote_gate": false, // block users from quoting the post
166
+
"thread_gate": [ // block replies. leave empty to disable replies
167
+
"mentioned",
168
+
"following",
169
+
"followers",
170
+
"everybody" // allow everybody to reply (ignores other options)
171
+
]
172
+
}
173
+
}
174
+
```
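
`thread_gate` and `quote_gate` map to `app.bsky.feed.threadgate` and `app.bsky.feed.postgate` records created next to each post (see `create_gates` in `bluesky/atproto2.py`). roughly, the option values translate to gate rules like this:

```python
from atproto_client import models

def threadgate_allow(opts: list[str]) -> list | None:
    # "everybody" means no threadgate record is written at all;
    # an empty list writes a threadgate with no rules, disabling replies
    if "everybody" in opts:
        return None
    rules = {
        "following": models.AppBskyFeedThreadgate.FollowingRule,
        "followers": models.AppBskyFeedThreadgate.FollowerRule,
        "mentioned": models.AppBskyFeedThreadgate.MentionRule,
    }
    return [rules[o]() for o in opts if o in rules]
```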
-164
atproto/identity.py
···
1
-
from pathlib import Path
2
-
from typing import Any, override
3
-
4
-
import dns.resolver
5
-
import requests
6
-
7
-
import env
8
-
from util.cache import Cacheable, TTLCache
9
-
from util.util import LOGGER, normalize_service_url, shutdown_hook
10
-
11
-
12
-
class DidDocument():
13
-
def __init__(self, raw_doc: dict[str, Any]) -> None:
14
-
self.raw: dict[str, Any] = raw_doc
15
-
self.atproto_pds: str | None = None
16
-
17
-
def get_atproto_pds(self) -> str | None:
18
-
if self.atproto_pds:
19
-
return self.atproto_pds
20
-
21
-
services = self.raw.get("service")
22
-
if not services:
23
-
return None
24
-
25
-
for service in services:
26
-
if (
27
-
service.get("id") == "#atproto_pds"
28
-
and service.get("type") == "AtprotoPersonalDataServer"
29
-
):
30
-
endpoint = service.get("serviceEndpoint")
31
-
if endpoint:
32
-
url = normalize_service_url(endpoint)
33
-
self.atproto_pds = url
34
-
return url
35
-
self.atproto_pds = ""
36
-
return None
37
-
38
-
39
-
class DidResolver(Cacheable):
40
-
def __init__(self, plc_host: str) -> None:
41
-
self.plc_host: str = plc_host
42
-
self.__cache: TTLCache[str, DidDocument] = TTLCache(ttl_seconds=12 * 60 * 60)
43
-
44
-
def try_resolve_plc(self, did: str) -> DidDocument | None:
45
-
url = f"{self.plc_host}/{did}"
46
-
response = requests.get(url, timeout=10, allow_redirects=True)
47
-
48
-
if response.status_code == 200:
49
-
return DidDocument(response.json())
50
-
elif response.status_code == 404 or response.status_code == 410:
51
-
return None # tombstone or not registered
52
-
else:
53
-
response.raise_for_status()
54
-
55
-
def try_resolve_web(self, did: str) -> DidDocument | None:
56
-
url = f"http://{did[len('did:web:') :]}/.well-known/did.json"
57
-
response = requests.get(url, timeout=10, allow_redirects=True)
58
-
59
-
if response.status_code == 200:
60
-
return DidDocument(response.json())
61
-
elif response.status_code == 404 or response.status_code == 410:
62
-
return None # tombstone or gone
63
-
else:
64
-
response.raise_for_status()
65
-
66
-
def resolve_did(self, did: str) -> DidDocument:
67
-
cached = self.__cache.get(did)
68
-
if cached:
69
-
return cached
70
-
71
-
if did.startswith("did:plc:"):
72
-
from_plc = self.try_resolve_plc(did)
73
-
if from_plc:
74
-
self.__cache.set(did, from_plc)
75
-
return from_plc
76
-
elif did.startswith("did:web:"):
77
-
from_web = self.try_resolve_web(did)
78
-
if from_web:
79
-
self.__cache.set(did, from_web)
80
-
return from_web
81
-
raise Exception(f"Failed to resolve {did}!")
82
-
83
-
@override
84
-
def dump_cache(self, path: Path):
85
-
self.__cache.dump_cache(path)
86
-
87
-
@override
88
-
def load_cache(self, path: Path):
89
-
self.__cache.load_cache(path)
90
-
91
-
class HandleResolver(Cacheable):
92
-
def __init__(self) -> None:
93
-
self.__cache: TTLCache[str, str] = TTLCache(ttl_seconds=12 * 60 * 60)
94
-
95
-
def try_resolve_dns(self, handle: str) -> str | None:
96
-
try:
97
-
dns_query = f"_atproto.{handle}"
98
-
answers = dns.resolver.resolve(dns_query, "TXT")
99
-
100
-
for rdata in answers:
101
-
for txt_data in rdata.strings:
102
-
did = txt_data.decode("utf-8").strip()
103
-
if did.startswith("did="):
104
-
return did[4:]
105
-
except dns.resolver.NXDOMAIN:
106
-
LOGGER.debug(f"DNS record not found for _atproto.{handle}")
107
-
return None
108
-
except dns.resolver.NoAnswer:
109
-
LOGGER.debug(f"No TXT records found for _atproto.{handle}")
110
-
return None
111
-
112
-
def try_resolve_http(self, handle: str) -> str | None:
113
-
url = f"http://{handle}/.well-known/atproto-did"
114
-
response = requests.get(url, timeout=10, allow_redirects=True)
115
-
116
-
if response.status_code == 200:
117
-
did = response.text.strip()
118
-
if did.startswith("did:"):
119
-
return did
120
-
else:
121
-
raise ValueError(f"Got invalid did: from {url} = {did}!")
122
-
else:
123
-
response.raise_for_status()
124
-
125
-
def resolve_handle(self, handle: str) -> str:
126
-
cached = self.__cache.get(handle)
127
-
if cached:
128
-
return cached
129
-
130
-
from_dns = self.try_resolve_dns(handle)
131
-
if from_dns:
132
-
self.__cache.set(handle, from_dns)
133
-
return from_dns
134
-
135
-
from_http = self.try_resolve_http(handle)
136
-
if from_http:
137
-
self.__cache.set(handle, from_http)
138
-
return from_http
139
-
140
-
raise Exception(f"Failed to resolve handle {handle}!")
141
-
142
-
@override
143
-
def dump_cache(self, path: Path):
144
-
self.__cache.dump_cache(path)
145
-
146
-
@override
147
-
def load_cache(self, path: Path):
148
-
self.__cache.load_cache(path)
149
-
150
-
151
-
handle_resolver = HandleResolver()
152
-
did_resolver = DidResolver(env.PLC_HOST)
153
-
154
-
did_cache = env.CACHE_DIR.joinpath('did.cache')
155
-
handle_cache = env.CACHE_DIR.joinpath('handle.cache')
156
-
157
-
did_resolver.load_cache(did_cache)
158
-
handle_resolver.load_cache(handle_cache)
159
-
160
-
def cache_dump():
161
-
did_resolver.dump_cache(did_cache)
162
-
handle_resolver.dump_cache(handle_cache)
163
-
164
-
shutdown_hook.append(cache_dump)
-11
atproto/util.py
···
1
-
URI = "at://"
2
-
URI_LEN = len(URI)
3
-
4
-
5
-
class AtUri:
6
-
@classmethod
7
-
def record_uri(cls, uri: str) -> tuple[str, str, str]:
8
-
did, collection, rid = uri[URI_LEN:].split("/")
9
-
if not (did and collection and rid):
10
-
raise ValueError(f"Ivalid record uri {uri}!")
11
-
return did, collection, rid
+196
bluesky/atproto2.py
···
1
+
from typing import Any
2
+
3
+
from atproto import AtUri, Client, IdResolver, client_utils
4
+
from atproto_client import models
5
+
6
+
from util.util import LOGGER
7
+
8
+
9
+
def resolve_identity(
10
+
handle: str | None = None, did: str | None = None, pds: str | None = None
11
+
):
12
+
"""helper to try and resolve identity from provided parameters, a valid handle is enough"""
13
+
14
+
if did and pds:
15
+
return did, pds[:-1] if pds.endswith("/") else pds
16
+
17
+
resolver = IdResolver()
18
+
if not did:
19
+
if not handle:
20
+
raise Exception("ATP handle not specified!")
21
+
LOGGER.info("Resolving ATP identity for %s...", handle)
22
+
did = resolver.handle.resolve(handle)
23
+
if not did:
24
+
raise Exception("Failed to resolve DID!")
25
+
26
+
if not pds:
27
+
LOGGER.info("Resolving PDS from DID document...")
28
+
did_doc = resolver.did.resolve(did)
29
+
if not did_doc:
30
+
raise Exception("Failed to resolve DID doc for '%s'", did)
31
+
pds = did_doc.get_pds_endpoint()
32
+
if not pds:
33
+
raise Exception("Failed to resolve PDS!")
34
+
35
+
return did, pds[:-1] if pds.endswith("/") else pds
36
+
37
+
38
+
class Client2(Client):
39
+
def __init__(self, base_url: str | None = None, *args: Any, **kwargs: Any) -> None:
40
+
super().__init__(base_url, *args, **kwargs)
41
+
42
+
def send_video(
43
+
self,
44
+
text: str | client_utils.TextBuilder,
45
+
video: bytes,
46
+
video_alt: str | None = None,
47
+
video_aspect_ratio: models.AppBskyEmbedDefs.AspectRatio | None = None,
48
+
reply_to: models.AppBskyFeedPost.ReplyRef | None = None,
49
+
langs: list[str] | None = None,
50
+
facets: list[models.AppBskyRichtextFacet.Main] | None = None,
51
+
labels: models.ComAtprotoLabelDefs.SelfLabels | None = None,
52
+
time_iso: str | None = None,
53
+
) -> models.AppBskyFeedPost.CreateRecordResponse:
54
+
"""same as send_video, but with labels"""
55
+
56
+
if video_alt is None:
57
+
video_alt = ""
58
+
59
+
upload = self.upload_blob(video)
60
+
61
+
return self.send_post(
62
+
text,
63
+
reply_to=reply_to,
64
+
embed=models.AppBskyEmbedVideo.Main(
65
+
video=upload.blob, alt=video_alt, aspect_ratio=video_aspect_ratio
66
+
),
67
+
langs=langs,
68
+
facets=facets,
69
+
labels=labels,
70
+
time_iso=time_iso,
71
+
)
72
+
73
+
def send_images(
74
+
self,
75
+
text: str | client_utils.TextBuilder,
76
+
images: list[bytes],
77
+
image_alts: list[str] | None = None,
78
+
image_aspect_ratios: list[models.AppBskyEmbedDefs.AspectRatio] | None = None,
79
+
reply_to: models.AppBskyFeedPost.ReplyRef | None = None,
80
+
langs: list[str] | None = None,
81
+
facets: list[models.AppBskyRichtextFacet.Main] | None = None,
82
+
labels: models.ComAtprotoLabelDefs.SelfLabels | None = None,
83
+
time_iso: str | None = None,
84
+
) -> models.AppBskyFeedPost.CreateRecordResponse:
85
+
"""same as send_images, but with labels"""
86
+
87
+
if image_alts is None:
88
+
image_alts = [""] * len(images)
89
+
else:
90
+
diff = len(images) - len(image_alts)
91
+
image_alts = image_alts + [""] * diff
92
+
93
+
if image_aspect_ratios is None:
94
+
aligned_image_aspect_ratios = [None] * len(images)
95
+
else:
96
+
diff = len(images) - len(image_aspect_ratios)
97
+
aligned_image_aspect_ratios = image_aspect_ratios + [None] * diff
98
+
99
+
uploads = [self.upload_blob(image) for image in images]
100
+
101
+
embed_images = [
102
+
models.AppBskyEmbedImages.Image(
103
+
alt=alt, image=upload.blob, aspect_ratio=aspect_ratio
104
+
)
105
+
for alt, upload, aspect_ratio in zip(
106
+
image_alts, uploads, aligned_image_aspect_ratios
107
+
)
108
+
]
109
+
110
+
return self.send_post(
111
+
text,
112
+
reply_to=reply_to,
113
+
embed=models.AppBskyEmbedImages.Main(images=embed_images),
114
+
langs=langs,
115
+
facets=facets,
116
+
labels=labels,
117
+
time_iso=time_iso,
118
+
)
119
+
120
+
def send_post(
121
+
self,
122
+
text: str | client_utils.TextBuilder,
123
+
reply_to: models.AppBskyFeedPost.ReplyRef | None = None,
124
+
embed: None
125
+
| models.AppBskyEmbedImages.Main
126
+
| models.AppBskyEmbedExternal.Main
127
+
| models.AppBskyEmbedRecord.Main
128
+
| models.AppBskyEmbedRecordWithMedia.Main
129
+
| models.AppBskyEmbedVideo.Main = None,
130
+
langs: list[str] | None = None,
131
+
facets: list[models.AppBskyRichtextFacet.Main] | None = None,
132
+
labels: models.ComAtprotoLabelDefs.SelfLabels | None = None,
133
+
time_iso: str | None = None,
134
+
) -> models.AppBskyFeedPost.CreateRecordResponse:
135
+
"""same as send_post, but with labels"""
136
+
137
+
if isinstance(text, client_utils.TextBuilder):
138
+
facets = text.build_facets()
139
+
text = text.build_text()
140
+
141
+
repo = self.me and self.me.did
142
+
if not repo:
143
+
raise Exception("Client not logged in!")
144
+
145
+
if not langs:
146
+
langs = ["en"]
147
+
148
+
record = models.AppBskyFeedPost.Record(
149
+
created_at=time_iso or self.get_current_time_iso(),
150
+
text=text,
151
+
reply=reply_to or None,
152
+
embed=embed or None,
153
+
langs=langs,
154
+
facets=facets or None,
155
+
labels=labels or None,
156
+
)
157
+
return self.app.bsky.feed.post.create(repo, record)
158
+
159
+
def create_gates(
160
+
self,
161
+
thread_gate_opts: list[str],
162
+
quote_gate: bool,
163
+
post_uri: str,
164
+
time_iso: str | None = None,
165
+
):
166
+
account = self.me
167
+
if not account:
168
+
raise Exception("Client not logged in!")
169
+
170
+
rkey = AtUri.from_str(post_uri).rkey
171
+
time_iso = time_iso or self.get_current_time_iso()
172
+
173
+
if "everybody" not in thread_gate_opts:
174
+
allow = []
175
+
if thread_gate_opts:
176
+
if "following" in thread_gate_opts:
177
+
allow.append(models.AppBskyFeedThreadgate.FollowingRule())
178
+
if "followers" in thread_gate_opts:
179
+
allow.append(models.AppBskyFeedThreadgate.FollowerRule())
180
+
if "mentioned" in thread_gate_opts:
181
+
allow.append(models.AppBskyFeedThreadgate.MentionRule())
182
+
183
+
thread_gate = models.AppBskyFeedThreadgate.Record(
184
+
post=post_uri, created_at=time_iso, allow=allow
185
+
)
186
+
187
+
self.app.bsky.feed.threadgate.create(account.did, thread_gate, rkey)
188
+
189
+
if quote_gate:
190
+
post_gate = models.AppBskyFeedPostgate.Record(
191
+
post=post_uri,
192
+
created_at=time_iso,
193
+
embedding_rules=[models.AppBskyFeedPostgate.DisableRule()],
194
+
)
195
+
196
+
self.app.bsky.feed.postgate.create(account.did, post_gate, rkey)
+199
bluesky/common.py
···
1
+
import re
2
+
3
+
from atproto import client_utils
4
+
5
+
import cross
6
+
from util.media import MediaInfo
7
+
from util.util import canonical_label
8
+
9
+
# only for lexicon reference
10
+
SERVICE = "https://bsky.app"
11
+
12
+
# TODO this is terrible and stupid
13
+
ADULT_PATTERN = re.compile(
14
+
r"\b(sexual content|nsfw|erotic|adult only|18\+)\b", re.IGNORECASE
15
+
)
16
+
PORN_PATTERN = re.compile(r"\b(porn|yiff|hentai|pornographic|fetish)\b", re.IGNORECASE)
17
+
18
+
19
+
class BlueskyPost(cross.Post):
20
+
def __init__(
21
+
self, record: dict, tokens: list[cross.Token], attachments: list[MediaInfo]
22
+
) -> None:
23
+
super().__init__()
24
+
self.uri = record["$xpost.strongRef"]["uri"]
25
+
self.parent_uri = None
26
+
if record.get("reply"):
27
+
self.parent_uri = record["reply"]["parent"]["uri"]
28
+
29
+
self.tokens = tokens
30
+
self.timestamp = record["createdAt"]
31
+
labels = record.get("labels", {}).get("values")
32
+
self.spoiler = None
33
+
if labels:
34
+
self.spoiler = ", ".join(
35
+
[str(label["val"]).replace("-", " ") for label in labels]
36
+
)
37
+
38
+
self.attachments = attachments
39
+
self.languages = record.get("langs", [])
40
+
41
+
# at:// of the post record
42
+
def get_id(self) -> str:
43
+
return self.uri
44
+
45
+
def get_parent_id(self) -> str | None:
46
+
return self.parent_uri
47
+
48
+
def get_tokens(self) -> list[cross.Token]:
49
+
return self.tokens
50
+
51
+
def get_text_type(self) -> str:
52
+
return "text/plain"
53
+
54
+
def get_timestamp(self) -> str:
55
+
return self.timestamp
56
+
57
+
def get_attachments(self) -> list[MediaInfo]:
58
+
return self.attachments
59
+
60
+
def get_spoiler(self) -> str | None:
61
+
return self.spoiler
62
+
63
+
def get_languages(self) -> list[str]:
64
+
return self.languages
65
+
66
+
def is_sensitive(self) -> bool:
67
+
return self.spoiler is not None
68
+
69
+
def get_post_url(self) -> str | None:
70
+
did, _, post_id = str(self.uri[len("at://") :]).split("/")
71
+
72
+
return f"https://bsky.app/profile/{did}/post/{post_id}"
73
+
74
+
75
+
def tokenize_post(post: dict) -> list[cross.Token]:
76
+
text: str = post.get("text", "")
77
+
if not text:
78
+
return []
79
+
ut8_text = text.encode(encoding="utf-8")
80
+
81
+
def decode(ut8: bytes) -> str:
82
+
return ut8.decode(encoding="utf-8")
83
+
84
+
facets: list[dict] = post.get("facets", [])
85
+
if not facets:
86
+
return [cross.TextToken(decode(ut8_text))]
87
+
88
+
slices: list[tuple[int, int, str, str]] = []
89
+
90
+
for facet in facets:
91
+
features: list[dict] = facet.get("features", [])
92
+
if not features:
93
+
continue
94
+
95
+
# we don't support overlapping facets/features
96
+
feature = features[0]
97
+
feature_type = feature["$type"]
98
+
index = facet["index"]
99
+
match feature_type:
100
+
case "app.bsky.richtext.facet#tag":
101
+
slices.append(
102
+
(index["byteStart"], index["byteEnd"], "tag", feature["tag"])
103
+
)
104
+
case "app.bsky.richtext.facet#link":
105
+
slices.append(
106
+
(index["byteStart"], index["byteEnd"], "link", feature["uri"])
107
+
)
108
+
case "app.bsky.richtext.facet#mention":
109
+
slices.append(
110
+
(index["byteStart"], index["byteEnd"], "mention", feature["did"])
111
+
)
112
+
113
+
if not slices:
114
+
return [cross.TextToken(decode(ut8_text))]
115
+
116
+
slices.sort(key=lambda s: s[0])
117
+
unique: list[tuple[int, int, str, str]] = []
118
+
current_end = 0
119
+
for start, end, ttype, val in slices:
120
+
if start >= current_end:
121
+
unique.append((start, end, ttype, val))
122
+
current_end = end
123
+
124
+
if not unique:
125
+
return [cross.TextToken(decode(ut8_text))]
126
+
127
+
tokens: list[cross.Token] = []
128
+
prev = 0
129
+
130
+
for start, end, ttype, val in unique:
131
+
if start > prev:
132
+
# text between facets
133
+
tokens.append(cross.TextToken(decode(ut8_text[prev:start])))
134
+
# facet token
135
+
match ttype:
136
+
case "link":
137
+
label = decode(ut8_text[start:end])
138
+
139
+
# try to unflatten links
140
+
split = val.split("://", 1)
141
+
if len(split) > 1:
142
+
if split[1].startswith(label):
143
+
tokens.append(cross.LinkToken(val, ""))
144
+
prev = end
145
+
continue
146
+
147
+
if label.endswith("...") and split[1].startswith(label[:-3]):
148
+
tokens.append(cross.LinkToken(val, ""))
149
+
prev = end
150
+
continue
151
+
152
+
tokens.append(cross.LinkToken(val, label))
153
+
case "tag":
154
+
tag = decode(ut8_text[start:end])
155
+
tokens.append(cross.TagToken(tag[1:] if tag.startswith("#") else tag))
156
+
case "mention":
157
+
mention = decode(ut8_text[start:end])
158
+
tokens.append(
159
+
cross.MentionToken(
160
+
mention[1:] if mention.startswith("@") else mention, val
161
+
)
162
+
)
163
+
prev = end
164
+
165
+
if prev < len(ut8_text):
166
+
tokens.append(cross.TextToken(decode(ut8_text[prev:])))
167
+
168
+
return tokens
169
+
170
+
171
+
def tokens_to_richtext(tokens: list[cross.Token]) -> client_utils.TextBuilder | None:
172
+
builder = client_utils.TextBuilder()
173
+
174
+
def flatten_link(href: str):
175
+
split = href.split("://", 1)
176
+
if len(split) > 1:
177
+
href = split[1]
178
+
179
+
if len(href) > 32:
180
+
href = href[:32] + "..."
181
+
182
+
return href
183
+
184
+
for token in tokens:
185
+
if isinstance(token, cross.TextToken):
186
+
builder.text(token.text)
187
+
elif isinstance(token, cross.LinkToken):
188
+
if canonical_label(token.label, token.href):
189
+
builder.link(flatten_link(token.href), token.href)
190
+
continue
191
+
192
+
builder.link(token.label, token.href)
193
+
elif isinstance(token, cross.TagToken):
194
+
builder.tag("#" + token.tag, token.tag.lower())
195
+
else:
196
+
# fail on unsupported tokens
197
+
return None
198
+
199
+
return builder
-50
bluesky/info.py
···
1
-
from abc import ABC, abstractmethod
2
-
from typing import Any, override
3
-
4
-
from atproto.identity import did_resolver, handle_resolver
5
-
from cross.service import Service
6
-
from util.util import normalize_service_url
7
-
8
-
SERVICE = "https://bsky.app"
9
-
10
-
11
-
def validate_and_transform(data: dict[str, Any]):
12
-
if not data["handle"] and not data["did"]:
13
-
raise KeyError("no 'handle' or 'did' specified for bluesky input!")
14
-
15
-
if "did" in data:
16
-
did = str(data["did"]) # only did:web and did:plc are supported
17
-
if not did.startswith("did:plc:") and not did.startswith("did:web:"):
18
-
raise ValueError(f"Invalid handle {did}!")
19
-
20
-
if "pds" in data:
21
-
data["pds"] = normalize_service_url(data["pds"])
22
-
23
-
24
-
class BlueskyService(ABC, Service):
25
-
pds: str
26
-
did: str
27
-
28
-
def _init_identity(self) -> None:
29
-
handle, did, pds = self.get_identity_options()
30
-
if did:
31
-
self.did = did
32
-
if pds:
33
-
self.pds = pds
34
-
35
-
if not did:
36
-
if not handle:
37
-
raise KeyError("No did: or atproto handle provided!")
38
-
self.log.info("Resolving ATP identity for %s...", handle)
39
-
self.did = handle_resolver.resolve_handle(handle)
40
-
41
-
if not pds:
42
-
self.log.info("Resolving PDS from %s DID document...", self.did)
43
-
atp_pds = did_resolver.resolve_did(self.did).get_atproto_pds()
44
-
if not atp_pds:
45
-
raise Exception("Failed to resolve atproto pds for %s")
46
-
self.pds = atp_pds
47
-
48
-
@abstractmethod
49
-
def get_identity_options(self) -> tuple[str | None, str | None, str | None]:
50
-
pass
+139
-219
bluesky/input.py
···
1
1
import asyncio
2
2
import json
3
3
import re
4
-
from abc import ABC
5
-
from dataclasses import dataclass, field
6
-
from typing import Any, cast, override
4
+
from typing import Any, Callable
7
5
8
6
import websockets
7
+
from atproto_client import models
8
+
from atproto_client.models.utils import get_or_create as get_model_or_create
9
9
10
-
from atproto.util import AtUri
11
-
from bluesky.tokens import tokenize_post
12
-
from bluesky.info import SERVICE, BlueskyService, validate_and_transform
13
-
from cross.attachments import (
14
-
LabelsAttachment,
15
-
LanguagesAttachment,
16
-
MediaAttachment,
17
-
QuoteAttachment,
18
-
RemoteUrlAttachment,
19
-
)
20
-
from cross.media import Blob, download_blob
21
-
from cross.post import Post
22
-
from cross.service import InputService
23
-
from database.connection import DatabasePool
24
-
from util.util import normalize_service_url
10
+
import cross
11
+
import util.database as database
12
+
from bluesky.atproto2 import resolve_identity
13
+
from bluesky.common import SERVICE, BlueskyPost, tokenize_post
14
+
from util.database import DataBaseWorker
15
+
from util.media import MediaInfo, download_media
16
+
from util.util import LOGGER, as_envvar
25
17
26
18
27
-
@dataclass(kw_only=True)
28
19
class BlueskyInputOptions:
29
-
handle: str | None = None
30
-
did: str | None = None
31
-
pds: str | None = None
32
-
filters: list[re.Pattern[str]] = field(default_factory=lambda: [])
33
-
34
-
@classmethod
35
-
def from_dict(cls, data: dict[str, Any]) -> "BlueskyInputOptions":
36
-
validate_and_transform(data)
20
+
def __init__(self, o: dict) -> None:
21
+
self.filters = [re.compile(f) for f in o.get("regex_filters", [])]
37
22
38
-
if "filters" in data:
39
-
data["filters"] = [re.compile(r) for r in data["filters"]]
40
23
41
-
return BlueskyInputOptions(**data)
42
-
43
-
44
-
@dataclass(kw_only=True)
45
-
class BlueskyJetstreamInputOptions(BlueskyInputOptions):
46
-
jetstream: str = "wss://jetstream2.us-west.bsky.network/subscribe"
24
+
class BlueskyInput(cross.Input):
25
+
def __init__(self, settings: dict, db: DataBaseWorker) -> None:
26
+
self.options = BlueskyInputOptions(settings.get("options", {}))
27
+
did, pds = resolve_identity(
28
+
handle=as_envvar(settings.get("handle")),
29
+
did=as_envvar(settings.get("did")),
30
+
pds=as_envvar(settings.get("pds")),
31
+
)
32
+
self.pds = pds
47
33
48
-
@classmethod
49
-
def from_dict(cls, data: dict[str, Any]) -> "BlueskyJetstreamInputOptions":
50
-
jetstream = data.pop("jetstream", None)
34
+
# PDS is not a service; the lexicon and rids are the same across PDSes
35
+
super().__init__(SERVICE, did, settings, db)
51
36
52
-
base = BlueskyInputOptions.from_dict(data).__dict__.copy()
53
-
if jetstream:
54
-
base["jetstream"] = normalize_service_url(jetstream)
37
+
def _on_post(self, outputs: list[cross.Output], post: dict[str, Any]):
38
+
post_uri = post["$xpost.strongRef"]["uri"]
39
+
post_cid = post["$xpost.strongRef"]["cid"]
55
40
56
-
return BlueskyJetstreamInputOptions(**base)
41
+
parent_uri = None
42
+
if post.get("reply"):
43
+
parent_uri = post["reply"]["parent"]["uri"]
57
44
58
-
59
-
class BlueskyBaseInputService(BlueskyService, InputService, ABC):
60
-
def __init__(self, db: DatabasePool) -> None:
61
-
super().__init__(SERVICE, db)
45
+
embed = post.get("embed", {})
46
+
if embed.get("$type") in (
47
+
"app.bsky.embed.record",
48
+
"app.bsky.embed.recordWithMedia",
49
+
):
50
+
# recordWithMedia nests the record ref one level deeper
rcrd = embed["record"].get("record") or embed["record"]
did, collection, rid = str(rcrd["uri"][len("at://") :]).split("/")
53
+
if collection == "app.bsky.feed.post":
54
+
LOGGER.info("Skipping '%s'! Quote..", post_uri)
55
+
return
62
56
63
-
def _on_post(self, record: dict[str, Any]):
64
-
post_uri = cast(str, record["$xpost.strongRef"]["uri"])
65
-
post_cid = cast(str, record["$xpost.strongRef"]["cid"])
66
-
67
-
parent_uri = cast(
68
-
str, None if not record.get("reply") else record["reply"]["parent"]["uri"]
57
+
success = database.try_insert_post(
58
+
self.db, post_uri, parent_uri, self.user_id, self.service
69
59
)
70
-
parent = None
71
-
if parent_uri:
72
-
parent = self._get_post(self.url, self.did, parent_uri)
73
-
if not parent:
74
-
self.log.info(
75
-
"Skipping %s, parent %s not found in db", post_uri, parent_uri
76
-
)
77
-
return
78
-
79
-
tokens = tokenize_post(record["text"], record.get('facets', {}))
80
-
post = Post(id=post_uri, parent_id=parent_uri, tokens=tokens)
81
-
82
-
did, _, rid = AtUri.record_uri(post_uri)
83
-
post.attachments.put(
84
-
RemoteUrlAttachment(url=f"https://bsky.app/profile/{did}/post/{rid}")
60
+
if not success:
61
+
LOGGER.info("Skipping '%s' as parent post was not found in db!", post_uri)
62
+
return
63
+
database.store_data(
64
+
self.db, post_uri, self.user_id, self.service, {"cid": post_cid}
85
65
)
86
66
87
-
embed: dict[str, Any] = record.get("embed", {})
88
-
blob_urls: list[tuple[str, str, str | None]] = []
89
-
def handle_embeds(embed: dict[str, Any]) -> str | None:
90
-
nonlocal blob_urls, post
91
-
match cast(str, embed["$type"]):
92
-
case "app.bsky.embed.record" | "app.bsky.embed.recordWithMedia":
93
-
rcrd = embed['record']['record'] if embed['record'].get('record') else embed['record']
94
-
did, collection, _ = AtUri.record_uri(rcrd["uri"])
95
-
if collection != "app.bsky.feed.post":
96
-
return f"Unhandled record collection {collection}"
97
-
if did != self.did:
98
-
return ""
67
+
tokens = tokenize_post(post)
68
+
if not cross.test_filters(tokens, self.options.filters):
69
+
LOGGER.info("Skipping '%s'. Matched a filter!", post_uri)
70
+
return
99
71
100
-
rquote = self._get_post(self.url, did, rcrd["uri"])
101
-
if not rquote:
102
-
return f"Quote {rcrd["uri"]} not found in the db"
103
-
post.attachments.put(QuoteAttachment(quoted_id=rcrd["uri"], quoted_user=did))
72
+
LOGGER.info("Crossposting '%s'...", post_uri)
104
73
105
-
if embed.get('media'):
106
-
return handle_embeds(embed["media"])
107
-
case "app.bsky.embed.images":
108
-
for image in embed["images"]:
109
-
blob_cid = image["image"]["ref"]["$link"]
110
-
url = f"{self.pds}/xrpc/com.atproto.sync.getBlob?did={self.did}&cid={blob_cid}"
111
-
blob_urls.append((url, blob_cid, image.get("alt")))
112
-
case "app.bsky.embed.video":
113
-
blob_cid = embed["video"]["ref"]["$link"]
114
-
url = f"{self.pds}/xrpc/com.atproto.sync.getBlob?did={self.did}&cid={blob_cid}"
115
-
blob_urls.append((url, blob_cid, embed.get("alt")))
116
-
case _:
117
-
self.log.warning(f"Unhandled embed type {embed['$type']}")
74
+
def get_blob_url(blob: str):
75
+
return f"{self.pds}/xrpc/com.atproto.sync.getBlob?did={self.user_id}&cid={blob}"
118
76
119
-
if embed:
120
-
fexit = handle_embeds(embed)
121
-
if fexit is not None:
122
-
self.log.info("Skipping %s! %s", post_uri, fexit)
123
-
return
77
+
attachments: list[MediaInfo] = []
78
+
if embed.get("$type") == "app.bsky.embed.images":
79
+
model = get_model_or_create(embed, model=models.AppBskyEmbedImages.Main)
80
+
assert isinstance(model, models.AppBskyEmbedImages.Main)
124
81
125
-
if blob_urls:
126
-
blobs: list[Blob] = []
127
-
for url, cid, alt in blob_urls:
128
-
self.log.info("Downloading %s...", cid)
129
-
blob: Blob | None = download_blob(url, alt)
130
-
if not blob:
131
-
self.log.error(
132
-
"Skipping %s! Failed to download blob %s.", post_uri, cid
133
-
)
82
+
for image in model.images:
83
+
url = get_blob_url(image.image.cid.encode())
84
+
LOGGER.info("Downloading %s...", url)
85
+
io = download_media(url, image.alt)
86
+
if not io:
87
+
LOGGER.error("Skipping '%s'. Failed to download media!", post_uri)
134
88
return
135
-
blobs.append(blob)
136
-
post.attachments.put(MediaAttachment(blobs=blobs))
89
+
attachments.append(io)
90
+
elif embed.get("$type") == "app.bsky.embed.video":
91
+
model = get_model_or_create(embed, model=models.AppBskyEmbedVideo.Main)
92
+
assert isinstance(model, models.AppBskyEmbedVideo.Main)
93
+
url = get_blob_url(model.video.cid.encode())
94
+
LOGGER.info("Downloading %s...", url)
95
+
io = download_media(url, model.alt if model.alt else "")
96
+
if not io:
97
+
LOGGER.error("Skipping '%s'. Failed to download media!", post_uri)
98
+
return
99
+
attachments.append(io)
100
+
101
+
cross_post = BlueskyPost(post, tokens, attachments)
102
+
for output in outputs:
103
+
output.accept_post(cross_post)
137
104
138
-
if "langs" in record:
139
-
post.attachments.put(LanguagesAttachment(langs=record["langs"]))
140
-
if "labels" in record:
141
-
post.attachments.put(
142
-
LabelsAttachment(
143
-
labels=[
144
-
label["val"].replace("-", " ") for label in record["values"]
145
-
]
146
-
),
147
-
)
105
+
def _on_delete_post(self, outputs: list[cross.Output], post_id: str, repost: bool):
106
+
post = database.find_post(self.db, post_id, self.user_id, self.service)
107
+
if not post:
108
+
return
148
109
149
-
if parent:
150
-
self._insert_post(
151
-
{
152
-
"user": self.did,
153
-
"service": self.url,
154
-
"identifier": post_uri,
155
-
"parent": parent["id"],
156
-
"root": parent["id"] if not parent["root"] else parent["root"],
157
-
"extra_data": json.dumps({"cid": post_cid}),
158
-
}
159
-
)
110
+
LOGGER.info("Deleting '%s'...", post_id)
111
+
if repost:
112
+
for output in outputs:
113
+
output.delete_repost(post_id)
160
114
else:
161
-
self._insert_post(
162
-
{
163
-
"user": self.did,
164
-
"service": self.url,
165
-
"identifier": post_uri,
166
-
"extra_data": json.dumps({"cid": post_cid}),
167
-
}
168
-
)
115
+
for output in outputs:
116
+
output.delete_post(post_id)
117
+
database.delete_post(self.db, post_id, self.user_id, self.service)
169
118
170
-
for out in self.outputs:
171
-
self.submitter(lambda: out.accept_post(post))
119
+
def _on_repost(self, outputs: list[cross.Output], post: dict[str, Any]):
120
+
post_uri = post["$xpost.strongRef"]["uri"]
121
+
post_cid = post["$xpost.strongRef"]["cid"]
172
122
173
-
def _on_repost(self, record: dict[str, Any]):
174
-
post_uri = cast(str, record["$xpost.strongRef"]["uri"])
175
-
post_cid = cast(str, record["$xpost.strongRef"]["cid"])
123
+
reposted_uri = post["subject"]["uri"]
176
124
177
-
reposted_uri = cast(str, record["subject"]["uri"])
178
-
reposted = self._get_post(self.url, self.did, reposted_uri)
179
-
if not reposted:
180
-
self.log.info(
181
-
"Skipping repost '%s' as reposted post '%s' was not found in the db.",
182
-
post_uri,
183
-
reposted_uri,
184
-
)
125
+
success = database.try_insert_repost(
126
+
self.db, post_uri, reposted_uri, self.user_id, self.service
127
+
)
128
+
if not success:
129
+
LOGGER.info("Skipping '%s' as reposted post was not found in db!", post_uri)
185
130
return
186
-
187
-
self._insert_post(
188
-
{
189
-
"user": self.did,
190
-
"service": self.url,
191
-
"identifier": post_uri,
192
-
"reposted": reposted["id"],
193
-
"extra_data": json.dumps({"cid": post_cid}),
194
-
}
131
+
database.store_data(
132
+
self.db, post_uri, self.user_id, self.service, {"cid": post_cid}
195
133
)
196
134
197
-
for out in self.outputs:
198
-
self.submitter(lambda: out.accept_repost(post_uri, reposted_uri))
135
+
LOGGER.info("Crossposting '%s'...", post_uri)
136
+
for output in outputs:
137
+
output.accept_repost(post_uri, reposted_uri)
199
138
200
-
def _on_delete_post(self, post_id: str, repost: bool):
201
-
post = self._get_post(self.url, self.did, post_id)
202
-
if not post:
203
-
return
204
139
205
-
if repost:
206
-
for output in self.outputs:
207
-
self.submitter(lambda: output.delete_repost(post_id))
208
-
else:
209
-
for output in self.outputs:
210
-
self.submitter(lambda: output.delete_post(post_id))
211
-
self._delete_post_by_id(post["id"])
140
+
class BlueskyJetstreamInput(BlueskyInput):
141
+
def __init__(self, settings: dict, db: DataBaseWorker) -> None:
142
+
super().__init__(settings, db)
143
+
self.jetstream = settings.get(
144
+
"jetstream", "wss://jetstream2.us-east.bsky.network/subscribe"
145
+
)
212
146
147
+
def __on_commit(self, outputs: list[cross.Output], msg: dict):
148
+
if msg.get("did") != self.user_id:
149
+
return
213
150
214
-
class BlueskyJetstreamInputService(BlueskyBaseInputService):
215
-
def __init__(self, db: DatabasePool, options: BlueskyJetstreamInputOptions) -> None:
216
-
super().__init__(db)
217
-
self.options: BlueskyJetstreamInputOptions = options
218
-
self._init_identity()
219
-
220
-
@override
221
-
def get_identity_options(self) -> tuple[str | None, str | None, str | None]:
222
-
return (self.options.handle, self.options.did, self.options.pds)
223
-
224
-
def _accept_msg(self, msg: websockets.Data) -> None:
225
-
data: dict[str, Any] = cast(dict[str, Any], json.loads(msg))
226
-
if data.get("did") != self.did:
227
-
return
228
-
commit: dict[str, Any] | None = data.get("commit")
151
+
commit: dict = msg.get("commit", {})
229
152
if not commit:
230
153
return
231
154
232
-
commit_type: str = cast(str, commit["operation"])
155
+
commit_type = commit["operation"]
233
156
match commit_type:
234
157
case "create":
235
-
record: dict[str, Any] = cast(dict[str, Any], commit["record"])
158
+
record = dict(commit.get("record", {}))
236
159
record["$xpost.strongRef"] = {
237
160
"cid": commit["cid"],
238
-
"uri": f"at://{self.did}/{commit['collection']}/{commit['rkey']}",
161
+
"uri": f"at://{self.user_id}/{commit['collection']}/{commit['rkey']}",
239
162
}
240
163
241
-
match cast(str, commit["collection"]):
164
+
match commit["collection"]:
242
165
case "app.bsky.feed.post":
243
-
self._on_post(record)
166
+
self._on_post(outputs, record)
244
167
case "app.bsky.feed.repost":
245
-
self._on_repost(record)
246
-
case _:
247
-
pass
168
+
self._on_repost(outputs, record)
248
169
case "delete":
249
170
post_id: str = (
250
-
f"at://{self.did}/{commit['collection']}/{commit['rkey']}"
171
+
f"at://{self.user_id}/{commit['collection']}/{commit['rkey']}"
251
172
)
252
-
match cast(str, commit["collection"]):
173
+
match commit["collection"]:
253
174
case "app.bsky.feed.post":
254
-
self._on_delete_post(post_id, False)
175
+
self._on_delete_post(outputs, post_id, False)
255
176
case "app.bsky.feed.repost":
256
-
self._on_delete_post(post_id, True)
257
-
case _:
258
-
pass
259
-
case _:
260
-
pass
177
+
self._on_delete_post(outputs, post_id, True)
261
178
262
-
@override
263
-
async def listen(self):
264
-
url = self.options.jetstream + "?"
265
-
url += "wantedCollections=app.bsky.feed.post"
266
-
url += "&wantedCollections=app.bsky.feed.repost"
267
-
url += f"&wantedDids={self.did}"
179
+
async def listen(
180
+
self, outputs: list[cross.Output], submit: Callable[[Callable[[], Any]], Any]
181
+
):
182
+
uri = self.jetstream + "?"
183
+
uri += "wantedCollections=app.bsky.feed.post"
184
+
uri += "&wantedCollections=app.bsky.feed.repost"
185
+
uri += f"&wantedDids={self.user_id}"
268
186
269
-
async for ws in websockets.connect(url):
187
+
async for ws in websockets.connect(
188
+
uri, extra_headers={"User-Agent": "XPost/0.0.3"}
189
+
):
270
190
try:
271
-
self.log.info("Listening to %s...", self.options.jetstream)
191
+
LOGGER.info("Listening to %s...", self.jetstream)
272
192
273
193
async def listen_for_messages():
274
194
async for msg in ws:
275
-
self.submitter(lambda: self._accept_msg(msg))
195
+
submit(lambda: self.__on_commit(outputs, json.loads(msg)))
276
196
277
197
listen = asyncio.create_task(listen_for_messages())
278
198
279
-
_ = await asyncio.gather(listen)
199
+
await asyncio.gather(listen)
280
200
except websockets.ConnectionClosedError as e:
281
-
self.log.error(e, stack_info=True, exc_info=True)
282
-
self.log.info("Reconnecting to %s...", self.options.jetstream)
201
+
LOGGER.error(e, stack_info=True, exc_info=True)
202
+
LOGGER.info("Reconnecting to %s...", self.jetstream)
283
203
continue
+481
bluesky/output.py
···
1
+
from atproto import Request, client_utils
2
+
from atproto_client import models
3
+
from httpx import Timeout
4
+
5
+
import cross
6
+
import misskey.mfm_util as mfm_util
7
+
import util.database as database
8
+
from bluesky.atproto2 import Client2, resolve_identity
9
+
from bluesky.common import ADULT_PATTERN, PORN_PATTERN, SERVICE, tokens_to_richtext
10
+
from util.database import DataBaseWorker
11
+
from util.media import (
12
+
MediaInfo,
13
+
compress_image,
14
+
convert_to_mp4,
15
+
get_filename_from_url,
16
+
get_media_meta,
17
+
)
18
+
from util.util import LOGGER, as_envvar
19
+
20
+
ALLOWED_GATES = ["mentioned", "following", "followers", "everybody"]
21
+
22
+
23
+
class BlueskyOutputOptions:
24
+
def __init__(self, o: dict) -> None:
25
+
self.quote_gate: bool = False
26
+
self.thread_gate: list[str] = ["everybody"]
27
+
self.encode_videos: bool = True
28
+
29
+
quote_gate = o.get("quote_gate")
30
+
if quote_gate is not None:
31
+
self.quote_gate = bool(quote_gate)
32
+
33
+
thread_gate = o.get("thread_gate")
34
+
if thread_gate is not None:
35
+
if any([v not in ALLOWED_GATES for v in thread_gate]):
36
+
raise ValueError(
37
+
f"'thread_gate' only accepts {', '.join(ALLOWED_GATES)} or [], got: {thread_gate}"
38
+
)
39
+
self.thread_gate = thread_gate
40
+
41
+
encode_videos = o.get("encode_videos")
42
+
if encode_videos is not None:
43
+
self.encode_videos = bool(encode_videos)
44
+
45
+
46
+
class BlueskyOutput(cross.Output):
47
+
def __init__(self, input: cross.Input, settings: dict, db: DataBaseWorker) -> None:
48
+
super().__init__(input, settings, db)
49
+
self.options = BlueskyOutputOptions(settings.get("options") or {})
50
+
51
+
if not as_envvar(settings.get("app-password")):
52
+
raise Exception("Account app password not provided!")
53
+
54
+
did, pds = resolve_identity(
55
+
handle=as_envvar(settings.get("handle")),
56
+
did=as_envvar(settings.get("did")),
57
+
pds=as_envvar(settings.get("pds")),
58
+
)
59
+
60
+
reqs = Request(timeout=Timeout(None, connect=30.0))
61
+
62
+
self.bsky = Client2(pds, request=reqs)
63
+
self.bsky.configure_proxy_header(
64
+
service_type="bsky_appview",
65
+
did=as_envvar(settings.get("bsky_appview")) or "did:web:api.bsky.app",
66
+
)
67
+
self.bsky.login(did, as_envvar(settings.get("app_password")))
68
+
69
+
def __check_login(self):
70
+
login = self.bsky.me
71
+
if not login:
72
+
raise Exception("Client not logged in!")
73
+
return login
74
+
75
+
def _find_parent(self, parent_id: str):
76
+
login = self.__check_login()
77
+
78
+
thread_tuple = database.find_mapped_thread(
79
+
self.db,
80
+
parent_id,
81
+
self.input.user_id,
82
+
self.input.service,
83
+
login.did,
84
+
SERVICE,
85
+
)
86
+
87
+
if not thread_tuple:
88
+
LOGGER.error("Failed to find thread tuple in the database!")
89
+
return None
90
+
91
+
root_uri: str = thread_tuple[0]
92
+
reply_uri: str = thread_tuple[1]
93
+
94
+
root_cid = database.fetch_data(self.db, root_uri, login.did, SERVICE)["cid"]
95
+
reply_cid = database.fetch_data(self.db, reply_uri, login.did, SERVICE)["cid"]
96
+
97
+
root_record = models.AppBskyFeedPost.CreateRecordResponse(
98
+
uri=root_uri, cid=root_cid
99
+
)
100
+
reply_record = models.AppBskyFeedPost.CreateRecordResponse(
101
+
uri=reply_uri, cid=reply_cid
102
+
)
103
+
104
+
return (
105
+
models.create_strong_ref(root_record),
106
+
models.create_strong_ref(reply_record),
107
+
thread_tuple[2],
108
+
thread_tuple[3],
109
+
)
110
+
111
+
def _split_attachments(self, attachments: list[MediaInfo]):
112
+
sup_media: list[MediaInfo] = []
113
+
unsup_media: list[MediaInfo] = []
114
+
115
+
for a in attachments:
116
+
if a.mime.startswith("image/") or a.mime.startswith(
117
+
"video/"
118
+
): # TODO convert gifs to videos
119
+
sup_media.append(a)
120
+
else:
121
+
unsup_media.append(a)
122
+
123
+
return (sup_media, unsup_media)
124
+
125
+
def _split_media_per_post(
126
+
self, tokens: list[client_utils.TextBuilder], media: list[MediaInfo]
127
+
):
128
+
posts: list[dict] = [{"tokens": tokens, "attachments": []} for tokens in tokens]
129
+
available_indices: list[int] = list(range(len(posts)))
130
+
131
+
current_image_post_idx: int | None = None
132
+
133
+
def make_blank_post() -> dict:
134
+
return {"tokens": [client_utils.TextBuilder().text("")], "attachments": []}
135
+
136
+
def pop_next_empty_index() -> int:
137
+
if available_indices:
138
+
return available_indices.pop(0)
139
+
else:
140
+
new_idx = len(posts)
141
+
posts.append(make_blank_post())
142
+
return new_idx
143
+
144
+
for att in media:
145
+
if att.mime.startswith("video/"):
146
+
current_image_post_idx = None
147
+
idx = pop_next_empty_index()
148
+
posts[idx]["attachments"].append(att)
149
+
elif att.mime.startswith("image/"):
150
+
if (
151
+
current_image_post_idx is not None
152
+
and len(posts[current_image_post_idx]["attachments"]) < 4
153
+
):
154
+
posts[current_image_post_idx]["attachments"].append(att)
155
+
else:
156
+
idx = pop_next_empty_index()
157
+
posts[idx]["attachments"].append(att)
158
+
current_image_post_idx = idx
159
+
160
+
result: list[tuple[client_utils.TextBuilder, list[MediaInfo]]] = []
161
+
for p in posts:
162
+
result.append((p["tokens"], p["attachments"]))
163
+
return result
164
+
165
+
def accept_post(self, post: cross.Post):
166
+
login = self.__check_login()
167
+
168
+
parent_id = post.get_parent_id()
169
+
170
+
# used for db insertion
171
+
new_root_id = None
172
+
new_parent_id = None
173
+
174
+
root_ref = None
175
+
reply_ref = None
176
+
if parent_id:
177
+
parents = self._find_parent(parent_id)
178
+
if not parents:
179
+
return
180
+
root_ref, reply_ref, new_root_id, new_parent_id = parents
181
+
182
+
tokens = post.get_tokens().copy()
183
+
184
+
unique_labels: set[str] = set()
185
+
cw = post.get_spoiler()
186
+
if cw:
187
+
tokens.insert(0, cross.TextToken("CW: " + cw + "\n\n"))
188
+
unique_labels.add("graphic-media")
189
+
190
+
# from bsky.app, a post can only have one of those labels
191
+
if PORN_PATTERN.search(cw):
192
+
unique_labels.add("porn")
193
+
elif ADULT_PATTERN.search(cw):
194
+
unique_labels.add("sexual")
195
+
196
+
if post.is_sensitive():
197
+
unique_labels.add("graphic-media")
198
+
199
+
labels = (
200
+
models.ComAtprotoLabelDefs.SelfLabels(
201
+
values=[
202
+
models.ComAtprotoLabelDefs.SelfLabel(val=label)
203
+
for label in unique_labels
204
+
]
205
+
)
206
+
if unique_labels
207
+
else None
208
+
)
209
+
210
+
sup_media, unsup_media = self._split_attachments(post.get_attachments())
211
+
212
+
if unsup_media:
213
+
if tokens:
214
+
tokens.append(cross.TextToken("\n"))
215
+
for attachment in unsup_media:
216
+
tokens.append(
217
+
cross.LinkToken(
218
+
attachment.url, f"[{get_filename_from_url(attachment.url)}]"
219
+
)
220
+
)
221
+
tokens.append(cross.TextToken(" "))
222
+
223
+
if post.get_text_type() == "text/x.misskeymarkdown":
224
+
tokens, status = mfm_util.strip_mfm(tokens)
225
+
post_url = post.get_post_url()
226
+
if status and post_url:
227
+
tokens.append(cross.TextToken("\n"))
228
+
tokens.append(
229
+
cross.LinkToken(post_url, "[Post contains MFM, see original]")
230
+
)
231
+
232
+
split_tokens: list[list[cross.Token]] = cross.split_tokens(tokens, 300)
233
+
post_text: list[client_utils.TextBuilder] = []
234
+
235
+
# convert tokens into rich text. skip post if contains unsupported tokens
236
+
for block in split_tokens:
237
+
rich_text = tokens_to_richtext(block)
238
+
239
+
if not rich_text:
240
+
LOGGER.error(
241
+
"Skipping '%s' as it contains invalid rich text types!",
242
+
post.get_id(),
243
+
)
244
+
return
245
+
post_text.append(rich_text)
246
+
247
+
if not post_text:
248
+
post_text = [client_utils.TextBuilder().text("")]
249
+
250
+
for m in sup_media:
251
+
if m.mime.startswith("image/"):
252
+
if len(m.io) > 2_000_000:
253
+
LOGGER.error(
254
+
"Skipping post_id '%s', failed to download attachment! File too large.",
255
+
post.get_id(),
256
+
)
257
+
return
258
+
259
+
if m.mime.startswith("video/"):
260
+
if m.mime != "video/mp4" and not self.options.encode_videos:
261
+
LOGGER.info(
262
+
"Video is not mp4, but encoding is disabled. Skipping '%s'...",
263
+
post.get_id(),
264
+
)
265
+
return
266
+
267
+
if len(m.io) > 100_000_000:
268
+
LOGGER.error(
269
+
"Skipping post_id '%s', failed to download attachment! File too large?",
270
+
post.get_id(),
271
+
)
272
+
return
273
+
274
+
created_records: list[models.AppBskyFeedPost.CreateRecordResponse] = []
275
+
baked_media = self._split_media_per_post(post_text, sup_media)
276
+
277
+
for text, attachments in baked_media:
278
+
if not attachments:
279
+
if reply_ref and root_ref:
280
+
new_post = self.bsky.send_post(
281
+
text,
282
+
reply_to=models.AppBskyFeedPost.ReplyRef(
283
+
parent=reply_ref, root=root_ref
284
+
),
285
+
labels=labels,
286
+
time_iso=post.get_timestamp(),
287
+
)
288
+
else:
289
+
new_post = self.bsky.send_post(
290
+
text, labels=labels, time_iso=post.get_timestamp()
291
+
)
292
+
root_ref = models.create_strong_ref(new_post)
293
+
294
+
self.bsky.create_gates(
295
+
self.options.thread_gate,
296
+
self.options.quote_gate,
297
+
new_post.uri,
298
+
time_iso=post.get_timestamp(),
299
+
)
300
+
reply_ref = models.create_strong_ref(new_post)
301
+
created_records.append(new_post)
302
+
else:
303
+
# if a single post is an image - everything else is an image
304
+
if attachments[0].mime.startswith("image/"):
305
+
images: list[bytes] = []
306
+
image_alts: list[str] = []
307
+
image_aspect_ratios: list[models.AppBskyEmbedDefs.AspectRatio] = []
308
+
309
+
for attachment in attachments:
310
+
image_io = compress_image(attachment.io, quality=100)
311
+
metadata = get_media_meta(image_io)
312
+
313
+
if len(image_io) > 1_000_000:
314
+
LOGGER.info("Compressing %s...", attachment.name)
315
+
image_io = compress_image(image_io)
316
+
317
+
images.append(image_io)
318
+
image_alts.append(attachment.alt)
319
+
image_aspect_ratios.append(
320
+
models.AppBskyEmbedDefs.AspectRatio(
321
+
width=metadata["width"], height=metadata["height"]
322
+
)
323
+
)
324
+
325
+
new_post = self.bsky.send_images(
326
+
text=text,
327
+
images=images,
328
+
image_alts=image_alts,
329
+
image_aspect_ratios=image_aspect_ratios,
330
+
reply_to=models.AppBskyFeedPost.ReplyRef(
331
+
parent=reply_ref, root=root_ref
332
+
)
333
+
if root_ref and reply_ref
334
+
else None,
335
+
labels=labels,
336
+
time_iso=post.get_timestamp(),
337
+
)
338
+
if not root_ref:
339
+
root_ref = models.create_strong_ref(new_post)
340
+
341
+
self.bsky.create_gates(
342
+
self.options.thread_gate,
343
+
self.options.quote_gate,
344
+
new_post.uri,
345
+
time_iso=post.get_timestamp(),
346
+
)
347
+
reply_ref = models.create_strong_ref(new_post)
348
+
created_records.append(new_post)
349
+
else:  # a video is guaranteed to be the only attachment in its post
350
+
metadata = get_media_meta(attachments[0].io)
351
+
if metadata["duration"] > 180:
352
+
LOGGER.info(
353
+
"Skipping post_id '%s', video attachment too long!",
354
+
post.get_id(),
355
+
)
356
+
return
357
+
358
+
video_io = attachments[0].io
359
+
if attachments[0].mime != "video/mp4":
360
+
LOGGER.info("Converting %s to mp4...", attachments[0].name)
361
+
video_io = convert_to_mp4(video_io)
362
+
363
+
aspect_ratio = models.AppBskyEmbedDefs.AspectRatio(
364
+
width=metadata["width"], height=metadata["height"]
365
+
)
366
+
367
+
new_post = self.bsky.send_video(
368
+
text=text,
369
+
video=video_io,
370
+
video_aspect_ratio=aspect_ratio,
371
+
video_alt=attachments[0].alt,
372
+
reply_to=models.AppBskyFeedPost.ReplyRef(
373
+
parent=reply_ref, root=root_ref
374
+
)
375
+
if root_ref and reply_ref
376
+
else None,
377
+
labels=labels,
378
+
time_iso=post.get_timestamp(),
379
+
)
380
+
if not root_ref:
381
+
root_ref = models.create_strong_ref(new_post)
382
+
383
+
self.bsky.create_gates(
384
+
self.options.thread_gate,
385
+
self.options.quote_gate,
386
+
new_post.uri,
387
+
time_iso=post.get_timestamp(),
388
+
)
389
+
reply_ref = models.create_strong_ref(new_post)
390
+
created_records.append(new_post)
391
+
392
+
db_post = database.find_post(
393
+
self.db, post.get_id(), self.input.user_id, self.input.service
394
+
)
395
+
assert db_post, "ghghghhhhh"
396
+
397
+
if new_root_id is None or new_parent_id is None:
398
+
new_root_id = database.insert_post(
399
+
self.db, created_records[0].uri, login.did, SERVICE
400
+
)
401
+
database.store_data(
402
+
self.db,
403
+
created_records[0].uri,
404
+
login.did,
405
+
SERVICE,
406
+
{"cid": created_records[0].cid},
407
+
)
408
+
409
+
new_parent_id = new_root_id
410
+
database.insert_mapping(self.db, db_post["id"], new_parent_id)
411
+
created_records = created_records[1:]
412
+
413
+
for record in created_records:
414
+
new_parent_id = database.insert_reply(
415
+
self.db, record.uri, login.did, SERVICE, new_parent_id, new_root_id
416
+
)
417
+
database.store_data(
418
+
self.db, record.uri, login.did, SERVICE, {"cid": record.cid}
419
+
)
420
+
database.insert_mapping(self.db, db_post["id"], new_parent_id)
421
+
422
+
def delete_post(self, identifier: str):
423
+
login = self.__check_login()
424
+
425
+
post = database.find_post(
426
+
self.db, identifier, self.input.user_id, self.input.service
427
+
)
428
+
if not post:
429
+
return
430
+
431
+
mappings = database.find_mappings(self.db, post["id"], SERVICE, login.did)
432
+
for mapping in mappings[::-1]:
433
+
LOGGER.info("Deleting '%s'...", mapping[0])
434
+
self.bsky.delete_post(mapping[0])
435
+
database.delete_post(self.db, mapping[0], login.did, SERVICE)
436
+
437
+
def accept_repost(self, repost_id: str, reposted_id: str):
438
+
login, repost = self.__delete_repost(repost_id)
439
+
if not (login and repost):
440
+
return
441
+
442
+
reposted = database.find_post(
443
+
self.db, reposted_id, self.input.user_id, self.input.service
444
+
)
445
+
if not reposted:
446
+
return
447
+
448
+
# mappings of the reposted post
449
+
mappings = database.find_mappings(self.db, reposted["id"], SERVICE, login.did)
450
+
if mappings:
451
+
cid = database.fetch_data(self.db, mappings[0][0], login.did, SERVICE)[
452
+
"cid"
453
+
]
454
+
rsp = self.bsky.repost(mappings[0][0], cid)
455
+
456
+
internal_id = database.insert_repost(
457
+
self.db, rsp.uri, reposted["id"], login.did, SERVICE
458
+
)
459
+
database.store_data(self.db, rsp.uri, login.did, SERVICE, {"cid": rsp.cid})
460
+
database.insert_mapping(self.db, repost["id"], internal_id)
461
+
462
+
def __delete_repost(
463
+
self, repost_id: str
464
+
) -> tuple[models.AppBskyActorDefs.ProfileViewDetailed | None, dict | None]:
465
+
login = self.__check_login()
466
+
467
+
repost = database.find_post(
468
+
self.db, repost_id, self.input.user_id, self.input.service
469
+
)
470
+
if not repost:
471
+
return None, None
472
+
473
+
mappings = database.find_mappings(self.db, repost["id"], SERVICE, login.did)
474
+
if mappings:
475
+
LOGGER.info("Deleting '%s'...", mappings[0][0])
476
+
self.bsky.unrepost(mappings[0][0])
477
+
database.delete_post(self.db, mappings[0][0], login.did, SERVICE)
478
+
return login, repost
479
+
480
+
def delete_repost(self, repost_id: str):
481
+
self.__delete_repost(repost_id)
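
A note on the record bookkeeping above: a split post becomes a thread by pinning the first created record as the root and pointing each later record's parent at the previous one. A minimal sketch of that pattern in isolation, using the same atproto SDK helpers already present in this file (the login credentials and `chunks` list are hypothetical):

```python
from atproto import Client, models

bsky = Client()
# bsky.login("handle.example", "app-password")  # hypothetical credentials

chunks = ["part one of a long post", "part two", "part three"]

root_ref = reply_ref = None
for chunk in chunks:
    new_post = bsky.send_post(
        text=chunk,
        reply_to=models.AppBskyFeedPost.ReplyRef(parent=reply_ref, root=root_ref)
        if root_ref and reply_ref
        else None,
    )
    if not root_ref:
        root_ref = models.create_strong_ref(new_post)  # first record anchors the thread
    reply_ref = models.create_strong_ref(new_post)  # the next record replies to this one
```
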
-95
bluesky/tokens.py
-95
bluesky/tokens.py
···
1
-
from cross.tokens import LinkToken, MentionToken, TagToken, TextToken, Token
2
-
3
-
4
-
def tokenize_post(text: str, facets: list[dict]) -> list[Token]:
5
-
def decode(ut8: bytes) -> str:
6
-
return ut8.decode(encoding="utf-8")
7
-
8
-
if not text:
9
-
return []
10
-
ut8_text = text.encode(encoding="utf-8")
11
-
if not facets:
12
-
return [TextToken(text=decode(ut8_text))]
13
-
14
-
slices: list[tuple[int, int, str, str]] = []
15
-
16
-
for facet in facets:
17
-
features: list[dict] = facet.get("features", [])
18
-
if not features:
19
-
continue
20
-
21
-
# we don't support overlapping facets/features
22
-
feature = features[0]
23
-
feature_type = feature["$type"]
24
-
index = facet["index"]
25
-
match feature_type:
26
-
case "app.bsky.richtext.facet#tag":
27
-
slices.append(
28
-
(index["byteStart"], index["byteEnd"], "tag", feature["tag"])
29
-
)
30
-
case "app.bsky.richtext.facet#link":
31
-
slices.append(
32
-
(index["byteStart"], index["byteEnd"], "link", feature["uri"])
33
-
)
34
-
case "app.bsky.richtext.facet#mention":
35
-
slices.append(
36
-
(index["byteStart"], index["byteEnd"], "mention", feature["did"])
37
-
)
38
-
39
-
if not slices:
40
-
return [TextToken(text=decode(ut8_text))]
41
-
42
-
slices.sort(key=lambda s: s[0])
43
-
unique: list[tuple[int, int, str, str]] = []
44
-
current_end = 0
45
-
for start, end, ttype, val in slices:
46
-
if start >= current_end:
47
-
unique.append((start, end, ttype, val))
48
-
current_end = end
49
-
50
-
if not unique:
51
-
return [TextToken(text=decode(ut8_text))]
52
-
53
-
tokens: list[Token] = []
54
-
prev = 0
55
-
56
-
for start, end, ttype, val in unique:
57
-
if start > prev:
58
-
# text between facets
59
-
tokens.append(TextToken(text=decode(ut8_text[prev:start])))
60
-
# facet token
61
-
match ttype:
62
-
case "link":
63
-
label = decode(ut8_text[start:end])
64
-
65
-
# try to unflatten links
66
-
split = val.split("://", 1)
67
-
if len(split) > 1:
68
-
if split[1].startswith(label):
69
-
tokens.append(LinkToken(href=val))
70
-
prev = end
71
-
continue
72
-
73
-
if label.endswith("...") and split[1].startswith(label[:-3]):
74
-
tokens.append(LinkToken(href=val))
75
-
prev = end
76
-
continue
77
-
78
-
tokens.append(LinkToken(href=val, label=label))
79
-
case "tag":
80
-
tag = decode(ut8_text[start:end])
81
-
tokens.append(TagToken(tag=tag[1:] if tag.startswith("#") else tag))
82
-
case "mention":
83
-
mention = decode(ut8_text[start:end])
84
-
tokens.append(
85
-
MentionToken(
86
-
username=mention[1:] if mention.startswith("@") else mention,
87
-
uri=val,
88
-
)
89
-
)
90
-
prev = end
91
-
92
-
if prev < len(ut8_text):
93
-
tokens.append(TextToken(text=decode(ut8_text[prev:])))
94
-
95
-
return tokens
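
One subtlety in `tokenize_post` above: facet indices are byte offsets into the UTF-8 encoding of the text, not character offsets, which is why the function slices `ut8_text` rather than `text`. A tiny self-contained illustration (the facet dict is made up):

```python
text = "héllo #tag"
ut8_text = text.encode("utf-8")  # "é" occupies two bytes

# a hypothetical tag facet as it would arrive from the API
facet = {"index": {"byteStart": 7, "byteEnd": 11}}
start, end = facet["index"]["byteStart"], facet["index"]["byteEnd"]

print(ut8_text[start:end].decode("utf-8"))  # -> "#tag"
# slicing characters instead would drop the "#": text[7:11] == "tag"
```
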
-37
cross/attachments.py
-37
cross/attachments.py
···
1
-
from dataclasses import dataclass
2
-
3
-
from cross.media import Blob
4
-
5
-
6
-
@dataclass(kw_only=True)
7
-
class Attachment:
8
-
pass
9
-
10
-
11
-
@dataclass(kw_only=True)
12
-
class LabelsAttachment(Attachment):
13
-
labels: list[str]
14
-
15
-
16
-
@dataclass(kw_only=True)
17
-
class LanguagesAttachment(Attachment):
18
-
langs: list[str]
19
-
20
-
21
-
@dataclass(kw_only=True)
22
-
class SensitiveAttachment(Attachment):
23
-
sensitive: bool
24
-
25
-
26
-
@dataclass(kw_only=True)
27
-
class RemoteUrlAttachment(Attachment):
28
-
url: str
29
-
30
-
@dataclass(kw_only=True)
31
-
class MediaAttachment(Attachment):
32
-
blobs: list[Blob]
33
-
34
-
@dataclass(kw_only=True)
35
-
class QuoteAttachment(Attachment):
36
-
quoted_id: str
37
-
quoted_user: str
-170
cross/media.py
-170
cross/media.py
···
1
-
from dataclasses import dataclass, field
2
-
3
-
import json
4
-
import re
5
-
import os
6
-
from typing import Any, cast
7
-
import magic
8
-
import subprocess
9
-
import urllib.parse
10
-
11
-
import requests
12
-
13
-
FILENAME = re.compile(r'filename="?([^\";]*)"?')
14
-
MAGIC = magic.Magic(mime=True)
15
-
16
-
17
-
@dataclass
18
-
class Blob:
19
-
url: str
20
-
mime: str
21
-
io: bytes = field(repr=False)
22
-
name: str | None = None
23
-
alt: str | None = None
24
-
25
-
26
-
@dataclass
27
-
class MediaInfo:
28
-
width: int
29
-
height: int
30
-
duration: float | None = None
31
-
32
-
33
-
def mime_from_bytes(io: bytes) -> str:
34
-
mime = MAGIC.from_buffer(io)
35
-
if not mime:
36
-
mime = "application/octet-stream"
37
-
return mime
38
-
39
-
def download_blob(url: str, alt: str | None = None, max_bytes: int = 100_000_000) -> Blob | None:
40
-
name = get_filename_from_url(url)
41
-
io = download_chuncked(url, max_bytes)
42
-
if not io:
43
-
return None
44
-
return Blob(url, mime_from_bytes(io), io, name, alt)
45
-
46
-
def download_chuncked(url: str, max_bytes: int = 100_000_000) -> bytes | None:
47
-
response = requests.get(url, stream=True, timeout=20)
48
-
if response.status_code != 200:
49
-
return None
50
-
51
-
downloaded_bytes = b""
52
-
current_size = 0
53
-
54
-
for chunk in response.iter_content(chunk_size=8192):
55
-
if not chunk:
56
-
continue
57
-
58
-
current_size += len(chunk)
59
-
if current_size > max_bytes:
60
-
response.close()
61
-
return None
62
-
63
-
downloaded_bytes += chunk
64
-
65
-
return downloaded_bytes
66
-
67
-
68
-
def get_filename_from_url(url: str) -> str:
69
-
try:
70
-
response = requests.head(url, timeout=5, allow_redirects=True)
71
-
disposition = response.headers.get("Content-Disposition")
72
-
if disposition:
73
-
filename = FILENAME.findall(disposition)
74
-
if filename:
75
-
return filename[0]
76
-
except requests.RequestException:
77
-
pass
78
-
79
-
parsed_url = urllib.parse.urlparse(url)
80
-
base_name = os.path.basename(parsed_url.path)
81
-
82
-
# hardcoded fix to return the cid for pds blobs
83
-
if base_name == "com.atproto.sync.getBlob":
84
-
qs = urllib.parse.parse_qs(parsed_url.query)
85
-
if qs and qs.get("cid"):
86
-
return qs["cid"][0]
87
-
88
-
return base_name
89
-
90
-
91
-
def convert_to_mp4(video: Blob) -> Blob:
92
-
cmd = [
93
-
"ffmpeg",
94
-
"-i", "pipe:0",
95
-
"-c:v", "libx264",
96
-
"-crf", "30",
97
-
"-preset", "slow",
98
-
"-c:a", "aac",
99
-
"-b:a", "128k",
100
-
"-movflags", "frag_keyframe+empty_moov+default_base_moof",
101
-
"-f", "mp4",
102
-
"pipe:1",
103
-
]
104
-
105
-
proc = subprocess.Popen(
106
-
cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
107
-
)
108
-
out_bytes, err = proc.communicate(input=video.io)
109
-
110
-
if proc.returncode != 0:
111
-
raise RuntimeError(f"ffmpeg compress failed: {err.decode()}")
112
-
113
-
return Blob(video.url, mime_from_bytes(out_bytes), out_bytes, video.name, video.alt)
114
-
115
-
116
-
def compress_image(image: Blob, quality: int = 95) -> Blob:
117
-
cmd = [
118
-
"ffmpeg",
119
-
"-f", "image2pipe",
120
-
"-i", "pipe:0",
121
-
"-c:v", "webp",
122
-
"-q:v", str(quality),
123
-
"-f", "image2pipe",
124
-
"pipe:1",
125
-
]
126
-
127
-
proc = subprocess.Popen(
128
-
cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
129
-
)
130
-
out_bytes, err = proc.communicate(input=image.io)
131
-
132
-
if proc.returncode != 0:
133
-
raise RuntimeError(f"ffmpeg compress failed: {err.decode()}")
134
-
135
-
return Blob(image.url, "image/webp", out_bytes, image.name, image.alt)
136
-
137
-
138
-
def probe_bytes(bytes: bytes) -> dict[str, Any]:
139
-
cmd = [
140
-
"ffprobe",
141
-
"-v",
142
-
"error",
143
-
"-show_format",
144
-
"-show_streams",
145
-
"-print_format",
146
-
"json",
147
-
"pipe:0",
148
-
]
149
-
proc = subprocess.run(
150
-
cmd, input=bytes, stdout=subprocess.PIPE, stderr=subprocess.PIPE
151
-
)
152
-
153
-
if proc.returncode != 0:
154
-
raise RuntimeError(f"ffprobe failed: {proc.stderr.decode()}")
155
-
156
-
return json.loads(proc.stdout)
157
-
158
-
159
-
def get_media_meta(bytes: bytes) -> MediaInfo:
160
-
probe = probe_bytes(bytes)
161
-
streams = [s for s in probe["streams"] if s["codec_type"] == "video"]
162
-
if not streams:
163
-
raise ValueError("No video stream found")
164
-
165
-
media: dict[str, Any] = cast(dict[str, Any], streams[0])
166
-
return MediaInfo(
167
-
width=media["width"],
168
-
height=media["height"],
169
-
duration=media.get("duration", probe["format"].get("duration")),
170
-
)
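
A detail worth noting in `convert_to_mp4` above: writing MP4 to `pipe:1` only works because of `-movflags frag_keyframe+empty_moov` — the default MP4 muxer seeks back to the start of the output to write its index, which a pipe cannot do. The same pattern in isolation (codec choices are illustrative):

```python
import subprocess

def to_fragmented_mp4(raw: bytes) -> bytes:
    """Re-encode arbitrary video bytes into fragmented MP4 over pipes."""
    cmd = [
        "ffmpeg", "-i", "pipe:0",
        "-c:v", "libx264", "-c:a", "aac",
        # fragmented MP4 needs no seekable output, so stdout works
        "-movflags", "frag_keyframe+empty_moov",
        "-f", "mp4", "pipe:1",
    ]
    proc = subprocess.run(cmd, input=raw, capture_output=True)
    if proc.returncode != 0:
        raise RuntimeError(f"ffmpeg failed: {proc.stderr.decode()}")
    return proc.stdout
```
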
-35
cross/post.py
-35
cross/post.py
···
1
-
from dataclasses import dataclass, field
2
-
from typing import TypeVar
3
-
4
-
from cross.attachments import Attachment
5
-
from cross.tokens import Token
6
-
7
-
T = TypeVar("T", bound=Attachment)
8
-
9
-
10
-
class AttachmentKeeper:
11
-
def __init__(self) -> None:
12
-
self._map: dict[type, Attachment] = {}
13
-
14
-
def put(self, attachment: Attachment) -> None:
15
-
self._map[attachment.__class__] = attachment
16
-
17
-
def get(self, cls: type[T]) -> T | None:
18
-
instance = self._map.get(cls)
19
-
if instance is None:
20
-
return None
21
-
if not isinstance(instance, cls):
22
-
raise TypeError(f"Expected {cls.__name__}, got {type(instance).__name__}")
23
-
return instance
24
-
25
-
def __repr__(self) -> str:
26
-
return f"AttachmentKeeper(_map={self._map.values()})"
27
-
28
-
29
-
@dataclass
30
-
class Post:
31
-
id: str
32
-
parent_id: str | None
33
-
tokens: list[Token]
34
-
text_type: str = "text/plain"
35
-
attachments: AttachmentKeeper = field(default_factory=AttachmentKeeper)
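
`AttachmentKeeper` above is a small type-indexed container: each attachment class can be stored at most once, and lookups are by class. A usage sketch with the dataclasses from `cross/attachments.py` above:

```python
from cross.attachments import LanguagesAttachment, MediaAttachment, SensitiveAttachment
from cross.post import AttachmentKeeper

keeper = AttachmentKeeper()
keeper.put(LanguagesAttachment(langs=["en"]))
keeper.put(SensitiveAttachment(sensitive=True))

keeper.get(LanguagesAttachment)  # -> LanguagesAttachment(langs=['en'])
keeper.get(MediaAttachment)      # -> None: nothing stored for that type
```
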
-159
cross/service.py
-159
cross/service.py
···
1
-
import logging
2
-
import sqlite3
3
-
from abc import ABC, abstractmethod
4
-
from typing import Any, Callable, cast
5
-
6
-
from cross.post import Post
7
-
from database.connection import DatabasePool
8
-
9
-
columns: list[str] = [
10
-
"user",
11
-
"service",
12
-
"identifier",
13
-
"parent",
14
-
"root",
15
-
"reposted",
16
-
"extra_data",
17
-
]
18
-
placeholders: str = ", ".join(["?" for _ in columns])
19
-
column_names: str = ", ".join(columns)
20
-
21
-
22
-
class Service:
23
-
def __init__(self, url: str, db: DatabasePool) -> None:
24
-
self.url: str = url
25
-
self.db: DatabasePool = db
26
-
self.log: logging.Logger = logging.getLogger(self.__class__.__name__)
27
-
# self._lock: threading.Lock = threading.Lock()
28
-
29
-
def _get_post(self, url: str, user: str, identifier: str) -> sqlite3.Row | None:
30
-
cursor = self.db.get_conn().cursor()
31
-
_ = cursor.execute(
32
-
"""
33
-
SELECT * FROM posts
34
-
WHERE service = ?
35
-
AND user = ?
36
-
AND identifier = ?
37
-
""",
38
-
(url, user, identifier),
39
-
)
40
-
return cast(sqlite3.Row, cursor.fetchone())
41
-
42
-
def _get_post_by_id(self, id: int) -> sqlite3.Row | None:
43
-
cursor = self.db.get_conn().cursor()
44
-
_ = cursor.execute("SELECT * FROM posts WHERE id = ?", (id,))
45
-
return cast(sqlite3.Row, cursor.fetchone())
46
-
47
-
def _get_mappings(
48
-
self, original: int, service: str, user: str
49
-
) -> list[sqlite3.Row]:
50
-
cursor = self.db.get_conn().cursor()
51
-
_ = cursor.execute(
52
-
"""
53
-
SELECT *
54
-
FROM posts AS p
55
-
JOIN mappings AS m
56
-
ON p.id = m.mapped
57
-
WHERE m.original = ?
58
-
AND p.service = ?
59
-
AND p.user = ?
60
-
ORDER BY p.id;
61
-
""",
62
-
(original, service, user),
63
-
)
64
-
return cursor.fetchall()
65
-
66
-
def _find_mapped_thread(
67
-
self, parent: str, iservice: str, iuser: str, oservice: str, ouser: str
68
-
):
69
-
reply_data = self._get_post(iservice, iuser, parent)
70
-
if not reply_data:
71
-
return None
72
-
73
-
reply_mappings: list[sqlite3.Row] | None = self._get_mappings(
74
-
reply_data["id"], oservice, ouser
75
-
)
76
-
if not reply_mappings:
77
-
return None
78
-
79
-
reply_identifier: sqlite3.Row = reply_mappings[-1]
80
-
root_identifier: sqlite3.Row = reply_mappings[0]
81
-
82
-
if reply_data["root_id"]:
83
-
root_data = self._get_post_by_id(reply_data["root_id"])
84
-
if not root_data:
85
-
return None
86
-
87
-
root_mappings = self._get_mappings(reply_data["root_id"], oservice, ouser)
88
-
if not root_mappings:
89
-
return None
90
-
root_identifier = root_mappings[0]
91
-
92
-
return (
93
-
root_identifier[0], # real ids
94
-
reply_identifier[0],
95
-
reply_data["root_id"], # db ids
96
-
reply_data["id"],
97
-
)
98
-
99
-
def _insert_post(self, post_data: dict[str, Any]):
100
-
values = [post_data.get(col) for col in columns]
101
-
cursor = self.db.get_conn().cursor()
102
-
_ = cursor.execute(
103
-
f"INSERT INTO posts ({column_names}) VALUES ({placeholders})", values
104
-
)
105
-
106
-
def _insert_post_mapping(self, original: int, mapped: int):
107
-
cursor = self.db.get_conn().cursor()
108
-
_ = cursor.execute(
109
-
"INSERT OR IGNORE INTO mappings (original, mapped) VALUES (?, ?);",
110
-
(original, mapped),
111
-
)
112
-
_ = cursor.execute(
113
-
"INSERT OR IGNORE INTO mappings (original, mapped) VALUES (?, ?);",
114
-
(mapped, original),
115
-
)
116
-
117
-
def _delete_post(self, url: str, user: str, identifier: str):
118
-
cursor = self.db.get_conn().cursor()
119
-
_ = cursor.execute(
120
-
"""
121
-
DELETE FROM posts
122
-
WHERE identifier = ?
123
-
AND service = ?
124
-
AND user = ?
125
-
""",
126
-
(identifier, url, user),
127
-
)
128
-
129
-
def _delete_post_by_id(self, id: int):
130
-
cursor = self.db.get_conn().cursor()
131
-
_ = cursor.execute("DELETE FROM posts WHERE id = ?", (id,))
132
-
133
-
134
-
class OutputService(Service):
135
-
def accept_post(self, service: str, user: str, post: Post):
136
-
self.log.warning("NOT IMPLEMENTED (%s), accept_post %s", self.url, post.id)
137
-
138
-
def delete_post(self, service: str, user: str, post_id: str):
139
-
self.log.warning("NOT IMPLEMENTED (%s), delete_post %s", self.url, post_id)
140
-
141
-
def accept_repost(self, service: str, user: str, repost_id: str, reposted_id: str):
142
-
self.log.warning(
143
-
"NOT IMPLEMENTED (%s), accept_repost %s of %s",
144
-
self.url,
145
-
repost_id,
146
-
reposted_id,
147
-
)
148
-
149
-
def delete_repost(self, service: str, user: str, repost_id: str):
150
-
self.log.warning("NOT IMPLEMENTED (%s), delete_repost %s", self.url, repost_id)
151
-
152
-
153
-
class InputService(ABC, Service):
154
-
outputs: list[OutputService]
155
-
submitter: Callable[[Callable[[], None]], None]
156
-
157
-
@abstractmethod
158
-
async def listen(self):
159
-
pass
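
`_insert_post_mapping` above writes the `(original, mapped)` pair in both directions, so a `_get_mappings` lookup works no matter which side of the bridge a post originated on. A sketch (`svc`, the service URLs, and the user ids are all hypothetical):

```python
svc._insert_post_mapping(1, 2)
# rows now present: (original=1, mapped=2) and (original=2, mapped=1)

svc._get_mappings(1, "https://bsky.example", "did:plc:abc")  # resolves post 2
svc._get_mappings(2, "https://masto.example", "1234")        # resolves post 1
```
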
-23
cross/tokens.py
-23
cross/tokens.py
···
1
-
from dataclasses import dataclass
2
-
3
-
@dataclass(kw_only=True)
4
-
class Token:
5
-
pass
6
-
7
-
@dataclass(kw_only=True)
8
-
class TextToken(Token):
9
-
text: str
10
-
11
-
@dataclass(kw_only=True)
12
-
class LinkToken(Token):
13
-
href: str
14
-
label: str | None = None
15
-
16
-
@dataclass(kw_only=True)
17
-
class TagToken(Token):
18
-
tag: str
19
-
20
-
@dataclass(kw_only=True)
21
-
class MentionToken(Token):
22
-
username: str
23
-
uri: str | None = None
+237
cross.py
+237
cross.py
···
1
+
import re
2
+
from abc import ABC, abstractmethod
3
+
from datetime import datetime, timezone
4
+
from typing import Any, Callable
5
+
6
+
from util.database import DataBaseWorker
7
+
from util.media import MediaInfo
8
+
from util.util import LOGGER, canonical_label
9
+
10
+
ALTERNATE = re.compile(r"\S+|\s+")
11
+
12
+
13
+
# generic token
14
+
class Token:
15
+
def __init__(self, type: str) -> None:
16
+
self.type = type
17
+
18
+
19
+
class TextToken(Token):
20
+
def __init__(self, text: str) -> None:
21
+
super().__init__("text")
22
+
self.text = text
23
+
24
+
25
+
# token that represents a link to a website. e.g. [link](https://google.com/)
26
+
class LinkToken(Token):
27
+
def __init__(self, href: str, label: str) -> None:
28
+
super().__init__("link")
29
+
self.href = href
30
+
self.label = label
31
+
32
+
33
+
# token that represents a hashtag. e.g. #SocialMedia
34
+
class TagToken(Token):
35
+
def __init__(self, tag: str) -> None:
36
+
super().__init__("tag")
37
+
self.tag = tag
38
+
39
+
40
+
# token that represents a mention of a user.
41
+
class MentionToken(Token):
42
+
def __init__(self, username: str, uri: str) -> None:
43
+
super().__init__("mention")
44
+
self.username = username
45
+
self.uri = uri
46
+
47
+
48
+
class MediaMeta:
49
+
def __init__(self, width: int, height: int, duration: float) -> None:
50
+
self.width = width
51
+
self.height = height
52
+
self.duration = duration
53
+
54
+
def get_width(self) -> int:
55
+
return self.width
56
+
57
+
def get_height(self) -> int:
58
+
return self.height
59
+
60
+
def get_duration(self) -> float:
61
+
return self.duration
62
+
63
+
64
+
class Post(ABC):
65
+
@abstractmethod
66
+
def get_id(self) -> str:
67
+
return ""
68
+
69
+
@abstractmethod
70
+
def get_parent_id(self) -> str | None:
71
+
pass
72
+
73
+
@abstractmethod
74
+
def get_tokens(self) -> list[Token]:
75
+
pass
76
+
77
+
# returns input text type.
78
+
# text/plain, text/markdown, text/x.misskeymarkdown
79
+
@abstractmethod
80
+
def get_text_type(self) -> str:
81
+
pass
82
+
83
+
# post iso timestamp
84
+
@abstractmethod
85
+
def get_timestamp(self) -> str:
86
+
pass
87
+
88
+
def get_attachments(self) -> list[MediaInfo]:
89
+
return []
90
+
91
+
def get_spoiler(self) -> str | None:
92
+
return None
93
+
94
+
def get_languages(self) -> list[str]:
95
+
return []
96
+
97
+
def is_sensitive(self) -> bool:
98
+
return False
99
+
100
+
def get_post_url(self) -> str | None:
101
+
return None
102
+
103
+
104
+
# generic input service.
105
+
# user and service for db queries
106
+
class Input:
107
+
def __init__(
108
+
self, service: str, user_id: str, settings: dict, db: DataBaseWorker
109
+
) -> None:
110
+
self.service = service
111
+
self.user_id = user_id
112
+
self.settings = settings
113
+
self.db = db
114
+
115
+
async def listen(self, outputs: list, handler: Callable[[Post], Any]):
116
+
pass
117
+
118
+
119
+
class Output:
120
+
def __init__(self, input: Input, settings: dict, db: DataBaseWorker) -> None:
121
+
self.input = input
122
+
self.settings = settings
123
+
self.db = db
124
+
125
+
def accept_post(self, post: Post):
126
+
LOGGER.warning('Not Implemented.. "posted" %s', post.get_id())
127
+
128
+
def delete_post(self, identifier: str):
129
+
LOGGER.warning('Not Implemented.. "deleted" %s', identifier)
130
+
131
+
def accept_repost(self, repost_id: str, reposted_id: str):
132
+
LOGGER.warning('Not Implemented.. "reblogged" %s, %s', repost_id, reposted_id)
133
+
134
+
def delete_repost(self, repost_id: str):
135
+
LOGGER.warning('Not Implemented.. "removed reblog" %s', repost_id)
136
+
137
+
138
+
def test_filters(tokens: list[Token], filters: list[re.Pattern[str]]):
139
+
if not tokens or not filters:
140
+
return True
141
+
142
+
markdown = ""
143
+
144
+
for token in tokens:
145
+
if isinstance(token, TextToken):
146
+
markdown += token.text
147
+
elif isinstance(token, LinkToken):
148
+
markdown += f"[{token.label}]({token.href})"
149
+
elif isinstance(token, TagToken):
150
+
markdown += "#" + token.tag
151
+
elif isinstance(token, MentionToken):
152
+
markdown += token.username
153
+
154
+
for filter in filters:
155
+
if filter.search(markdown):
156
+
return False
157
+
158
+
return True
159
+
160
+
161
+
def split_tokens(
162
+
tokens: list[Token], max_chars: int, max_link_len: int = 35
163
+
) -> list[list[Token]]:
164
+
def new_block():
165
+
nonlocal blocks, block, length
166
+
if block:
167
+
blocks.append(block)
168
+
block = []
169
+
length = 0
170
+
171
+
def append_text(text_segment):
172
+
nonlocal block
173
+
# if the last element in the current block is also text, just append to it
174
+
if block and isinstance(block[-1], TextToken):
175
+
block[-1].text += text_segment
176
+
else:
177
+
block.append(TextToken(text_segment))
178
+
179
+
blocks: list[list[Token]] = []
180
+
block: list[Token] = []
181
+
length = 0
182
+
183
+
for tk in tokens:
184
+
if isinstance(tk, TagToken):
185
+
tag_len = 1 + len(tk.tag) # (#) + tag
186
+
if length + tag_len > max_chars:
187
+
new_block() # create new block if the current one is too large
188
+
189
+
block.append(tk)
190
+
length += tag_len
191
+
elif isinstance(tk, LinkToken):  # TODO labels should probably be split too
192
+
link_len = len(tk.label)
193
+
if canonical_label(
194
+
tk.label, tk.href
195
+
): # cut down the link if the label is canonical
196
+
link_len = min(link_len, max_link_len)
197
+
198
+
if length + link_len > max_chars:
199
+
new_block()
200
+
block.append(tk)
201
+
length += link_len
202
+
elif isinstance(tk, TextToken):
203
+
segments: list[str] = ALTERNATE.findall(tk.text)
204
+
205
+
for seg in segments:
206
+
seg_len: int = len(seg)
207
+
if length + seg_len <= max_chars - (0 if seg.isspace() else 1):
208
+
append_text(seg)
209
+
length += seg_len
210
+
continue
211
+
212
+
if length > 0:
213
+
new_block()
214
+
215
+
if not seg.isspace():
216
+
while len(seg) > max_chars - 1:
217
+
chunk = seg[: max_chars - 1] + "-"
218
+
append_text(chunk)
219
+
new_block()
220
+
seg = seg[max_chars - 1 :]
221
+
else:
222
+
while len(seg) > max_chars:
223
+
chunk = seg[:max_chars]
224
+
append_text(chunk)
225
+
new_block()
226
+
seg = seg[max_chars:]
227
+
228
+
if seg:
229
+
append_text(seg)
230
+
length = len(seg)
231
+
else: # TODO fix mentions
232
+
block.append(tk)
233
+
234
+
if block:
235
+
blocks.append(block)
236
+
237
+
return blocks
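
A quick sketch of what `split_tokens` above produces for an over-long post (the input and the 300-character limit are illustrative):

```python
tokens = [
    TextToken("a very long status update " * 20),  # ~520 chars of plain text
    TagToken("xpost"),
]

blocks = split_tokens(tokens, max_chars=300)
# -> two blocks: text is broken on whitespace so whole words stay intact,
#    the trailing TagToken lands in the last block, and only words longer
#    than a whole block get hyphenated by the fallback loop above.
for block in blocks:
    print(sum(len(t.text) for t in block if isinstance(t, TextToken)))
```
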
-32
database/connection.py
-32
database/connection.py
···
1
-
import sqlite3
2
-
import threading
3
-
from pathlib import Path
4
-
5
-
6
-
class DatabasePool:
7
-
def __init__(self, db: Path) -> None:
8
-
self.db: Path = db
9
-
self._local: threading.local = threading.local()
10
-
self._conns: list[sqlite3.Connection] = []
11
-
12
-
def get_conn(self) -> sqlite3.Connection:
13
-
if getattr(self._local, 'conn', None) is None:
14
-
self._local.conn = get_conn(self.db)
15
-
self._conns.append(self._local.conn)
16
-
return self._local.conn
17
-
18
-
def close(self):
19
-
for c in self._conns:
20
-
c.close()
21
-
22
-
def get_conn(db: Path) -> sqlite3.Connection:
23
-
conn = sqlite3.connect(db, autocommit=True, check_same_thread=False)
24
-
conn.row_factory = sqlite3.Row
25
-
_ = conn.executescript("""
26
-
PRAGMA journal_mode = WAL;
27
-
PRAGMA mmap_size = 134217728;
28
-
PRAGMA cache_size = 4000;
29
-
PRAGMA synchronous = NORMAL;
30
-
PRAGMA foreign_keys = ON;
31
-
""")
32
-
return conn
-54
database/migrations.py
-54
database/migrations.py
···
1
-
import sqlite3
2
-
from pathlib import Path
3
-
from typing import Callable
4
-
5
-
from database.connection import get_conn
6
-
from util.util import LOGGER
7
-
8
-
class DatabaseMigrator:
9
-
def __init__(self, db_path: Path, migrations_folder: Path) -> None:
10
-
self.db_path: Path = db_path
11
-
self.migrations_folder: Path = migrations_folder
12
-
self.conn: sqlite3.Connection = get_conn(db_path)
13
-
_ = self.conn.execute("PRAGMA foreign_keys = OFF;")
14
-
self.conn.autocommit = False
15
-
16
-
def close(self):
17
-
self.conn.close()
18
-
19
-
def get_version(self) -> int:
20
-
cursor = self.conn.cursor()
21
-
_ = cursor.execute("PRAGMA user_version")
22
-
return int(cursor.fetchone()[0])
23
-
24
-
def set_version(self, version: int):
25
-
cursor = self.conn.cursor()
26
-
_ = cursor.execute(f"PRAGMA user_version = {version}")
27
-
self.conn.commit()
28
-
29
-
def apply_migration(self, version: int, filename: str, migration: Callable[[sqlite3.Connection], None]):
30
-
try:
31
-
_ = migration(self.conn)
32
-
self.set_version(version)
33
-
self.conn.commit()
34
-
LOGGER.info("Applied migration: %s..", filename)
35
-
except sqlite3.Error as e:
36
-
self.conn.rollback()
37
-
raise Exception(f"Error applying migration {filename}: {e}")
38
-
39
-
def migrate(self):
40
-
current_version = self.get_version()
41
-
from migrations._registry import load_migrations
42
-
migrations = load_migrations(self.migrations_folder)
43
-
44
-
if not migrations:
45
-
LOGGER.warning("No migration files found.")
46
-
return
47
-
48
-
pending = [m for m in migrations if m[0] > current_version]
49
-
if not pending:
50
-
LOGGER.info("No pending migrations.")
51
-
return
52
-
53
-
for version, filename, migration in pending:
54
-
self.apply_migration(version, filename, migration)
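
`apply_migration` above invokes each migration as `migration(self.conn)`, so a migration ultimately boils down to a callable taking the open connection. A hypothetical example of one (the exact registry contract in `migrations._registry` is not shown in this diff):

```python
import sqlite3

# hypothetical migrations/0002_add_extra_data.py picked up by load_migrations
def migrate(conn: sqlite3.Connection) -> None:
    conn.execute("ALTER TABLE posts ADD COLUMN extra_data TEXT NULL")
```
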
-13
env.py
-13
env.py
···
1
-
import os
2
-
from pathlib import Path
3
-
4
-
DEV = bool(os.environ.get("DEV")) or False
5
-
6
-
DATA_DIR = Path(os.environ.get("DATA_DIR") or "./data")
7
-
CACHE_DIR = Path(os.environ.get("CACHE_DIR") or DATA_DIR.joinpath("cache"))
8
-
SETTINGS_DIR = Path(os.environ.get("SETTINGS_DIR") or DATA_DIR.joinpath("settings.json"))
9
-
DATABASE_DIR = Path(os.environ.get("DATABASE_DIR") or DATA_DIR.joinpath("data.db"))
10
-
11
-
MIGRATIONS_DIR = Path(os.environ.get("MIGRATIONS_DIR") or "./migrations")
12
-
13
-
PLC_HOST = os.environ.get("PLC_HOST") or "https://plc.directory"
+121
-49
main.py
+121
-49
main.py
···
1
1
import asyncio
2
2
import json
3
+
import os
3
4
import queue
4
5
import threading
5
-
from pathlib import Path
6
-
from typing import Callable
6
+
import traceback
7
7
8
-
from database.connection import DatabasePool
9
-
import env
10
-
from database.migrations import DatabaseMigrator
11
-
from registry import create_input_service, create_output_service
12
-
from registry_bootstrap import bootstrap
13
-
from util.util import LOGGER, read_env, shutdown_hook
8
+
import cross
9
+
import util.database as database
10
+
from bluesky.input import BlueskyJetstreamInput
11
+
from bluesky.output import BlueskyOutput, BlueskyOutputOptions
12
+
from mastodon.input import MastodonInput, MastodonInputOptions
13
+
from mastodon.output import MastodonOutput
14
+
from misskey.input import MisskeyInput
15
+
from util.util import LOGGER, as_json
16
+
17
+
DEFAULT_SETTINGS: dict = {
18
+
"input": {
19
+
"type": "mastodon-wss",
20
+
"instance": "env:MASTODON_INSTANCE",
21
+
"token": "env:MASTODON_TOKEN",
22
+
"options": MastodonInputOptions({}),
23
+
},
24
+
"outputs": [
25
+
{
26
+
"type": "bluesky",
27
+
"handle": "env:BLUESKY_HANDLE",
28
+
"app-password": "env:BLUESKY_APP_PASSWORD",
29
+
"options": BlueskyOutputOptions({}),
30
+
}
31
+
],
32
+
}
14
33
34
+
INPUTS = {
35
+
"mastodon-wss": lambda settings, db: MastodonInput(settings, db),
36
+
"misskey-wss": lambda settigs, db: MisskeyInput(settigs, db),
37
+
"bluesky-jetstream-wss": lambda settings, db: BlueskyJetstreamInput(settings, db),
38
+
}
15
39
16
-
def main() -> None:
17
-
if not env.DATA_DIR.exists():
18
-
env.DATA_DIR.mkdir(parents=True)
40
+
OUTPUTS = {
41
+
"bluesky": lambda input, settings, db: BlueskyOutput(input, settings, db),
42
+
"mastodon": lambda input, settings, db: MastodonOutput(input, settings, db),
43
+
}
19
44
20
-
if not env.SETTINGS_DIR.exists():
21
-
LOGGER.info("First launch detected! Creating %s and exiting!", env.SETTINGS_DIR)
22
-
return
23
45
24
-
migrator = DatabaseMigrator(env.DATABASE_DIR, env.MIGRATIONS_DIR)
25
-
try:
26
-
migrator.migrate()
27
-
except Exception:
28
-
LOGGER.exception("Failed to migrate database!")
29
-
return
30
-
finally:
31
-
migrator.close()
46
+
def execute(data_dir):
47
+
if not os.path.exists(data_dir):
48
+
os.makedirs(data_dir)
32
49
33
-
db_pool = DatabasePool(env.DATABASE_DIR)
50
+
settings_path = os.path.join(data_dir, "settings.json")
51
+
database_path = os.path.join(data_dir, "data.db")
34
52
35
-
LOGGER.info("Bootstrapping registries...")
36
-
bootstrap()
53
+
if not os.path.exists(settings_path):
54
+
LOGGER.info("First launch detected! Creating %s and exiting!", settings_path)
55
+
56
+
with open(settings_path, "w") as f:
57
+
f.write(as_json(DEFAULT_SETTINGS, indent=2))
58
+
return 0
37
59
38
60
LOGGER.info("Loading settings...")
39
-
40
-
with open(env.SETTINGS_DIR) as f:
61
+
with open(settings_path, "rb") as f:
41
62
settings = json.load(f)
42
-
read_env(settings)
43
63
44
-
if "input" not in settings:
45
-
raise KeyError("No `input` sepcified in settings!")
46
-
if "outputs" not in settings:
47
-
raise KeyError("No `outputs` spicified in settings!")
64
+
LOGGER.info("Starting database worker...")
65
+
db_worker = database.DataBaseWorker(os.path.abspath(database_path))
48
66
49
-
input = create_input_service(db_pool, settings["input"])
50
-
outputs = [create_output_service(db_pool, data) for data in settings["outputs"]]
67
+
db_worker.execute("PRAGMA foreign_keys = ON;")
68
+
69
+
# create the posts table
70
+
# id - internal id of the post
71
+
# user_id - user id on the service (e.g. a724sknj5y9ydk0w)
72
+
# service - the service (e.g. https://shrimp.melontini.me)
73
+
# identifier - post id on the service (e.g. a8mpiyeej0fpjp0p)
74
+
# parent_id - the internal id of the parent
75
+
db_worker.execute(
76
+
"""
77
+
CREATE TABLE IF NOT EXISTS posts (
78
+
id INTEGER PRIMARY KEY AUTOINCREMENT,
79
+
user_id TEXT NOT NULL,
80
+
service TEXT NOT NULL,
81
+
identifier TEXT NOT NULL,
82
+
parent_id INTEGER NULL REFERENCES posts(id) ON DELETE SET NULL,
83
+
root_id INTEGER NULL REFERENCES posts(id) ON DELETE SET NULL
84
+
);
85
+
"""
86
+
)
87
+
88
+
columns = db_worker.execute("PRAGMA table_info(posts)")
89
+
column_names = [col[1] for col in columns]
90
+
if "reposted_id" not in column_names:
91
+
db_worker.execute("""
92
+
ALTER TABLE posts
93
+
ADD COLUMN reposted_id INTEGER NULL REFERENCES posts(id) ON DELETE SET NULL
94
+
""")
95
+
if "extra_data" not in column_names:
96
+
db_worker.execute("""
97
+
ALTER TABLE posts
98
+
ADD COLUMN extra_data TEXT NULL
99
+
""")
100
+
101
+
# create the mappings table
102
+
# original_post_id - the post this was mapped from
103
+
# mapped_post_id - the post this was mapped to
104
+
db_worker.execute(
105
+
"""
106
+
CREATE TABLE IF NOT EXISTS mappings (
107
+
original_post_id INTEGER NOT NULL REFERENCES posts(id) ON DELETE CASCADE,
108
+
mapped_post_id INTEGER NOT NULL
109
+
);
110
+
"""
111
+
)
112
+
113
+
input_settings = settings.get("input")
114
+
if not input_settings:
115
+
raise Exception("No input specified!")
116
+
outputs_settings = settings.get("outputs", [])
117
+
118
+
input = INPUTS[input_settings["type"]](input_settings, db_worker)
119
+
120
+
if not outputs_settings:
121
+
LOGGER.warning("No outputs specified! Check the config!")
122
+
123
+
outputs: list[cross.Output] = []
124
+
for output_settings in outputs_settings:
125
+
outputs.append(
126
+
OUTPUTS[output_settings["type"]](input, output_settings, db_worker)
127
+
)
51
128
52
129
LOGGER.info("Starting task worker...")
53
130
54
-
def worker(task_queue: queue.Queue[Callable[[], None] | None]):
131
+
def worker(queue: queue.Queue):
55
132
while True:
56
-
task = task_queue.get()
133
+
task = queue.get()
57
134
if task is None:
58
135
break
59
136
60
137
try:
61
138
task()
62
-
except Exception:
63
-
LOGGER.exception("Exception in worker thread!")
139
+
except Exception as e:
140
+
LOGGER.error(f"Exception in worker thread!\n{e}")
141
+
traceback.print_exc()
64
142
finally:
65
-
task_queue.task_done()
143
+
queue.task_done()
66
144
67
-
task_queue: queue.Queue[Callable[[], None] | None] = queue.Queue()
145
+
task_queue = queue.Queue()
68
146
thread = threading.Thread(target=worker, args=(task_queue,), daemon=True)
69
147
thread.start()
70
148
71
-
LOGGER.info("Connecting to %s...", input.url)
72
-
input.outputs = outputs
73
-
input.submitter = lambda c: task_queue.put(c)
149
+
LOGGER.info("Connecting to %s...", input.service)
74
150
try:
75
-
asyncio.run(input.listen())
151
+
asyncio.run(input.listen(outputs, lambda x: task_queue.put(x)))
76
152
except KeyboardInterrupt:
77
153
LOGGER.info("Stopping...")
78
154
79
155
task_queue.join()
80
156
task_queue.put(None)
81
157
thread.join()
82
-
db_pool.close()
83
-
84
-
for shook in shutdown_hook:
85
-
shook()
86
158
87
159
88
160
if __name__ == "__main__":
89
-
main()
161
+
execute("./data")
+52
mastodon/common.py
+52
mastodon/common.py
···
1
+
import cross
2
+
from util.media import MediaInfo
3
+
4
+
5
+
class MastodonPost(cross.Post):
6
+
def __init__(
7
+
self,
8
+
status: dict,
9
+
tokens: list[cross.Token],
10
+
media_attachments: list[MediaInfo],
11
+
) -> None:
12
+
super().__init__()
13
+
self.id = status["id"]
14
+
self.parent_id = status.get("in_reply_to_id")
15
+
self.tokens = tokens
16
+
self.content_type = status.get("content_type", "text/plain")
17
+
self.timestamp = status["created_at"]
18
+
self.media_attachments = media_attachments
19
+
self.spoiler = status.get("spoiler_text")
20
+
self.language = [status["language"]] if status.get("language") else []
21
+
self.sensitive = status.get("sensitive", False)
22
+
self.url = status.get("url")
23
+
24
+
def get_id(self) -> str:
25
+
return self.id
26
+
27
+
def get_parent_id(self) -> str | None:
28
+
return self.parent_id
29
+
30
+
def get_tokens(self) -> list[cross.Token]:
31
+
return self.tokens
32
+
33
+
def get_text_type(self) -> str:
34
+
return self.content_type
35
+
36
+
def get_timestamp(self) -> str:
37
+
return self.timestamp
38
+
39
+
def get_attachments(self) -> list[MediaInfo]:
40
+
return self.media_attachments
41
+
42
+
def get_spoiler(self) -> str | None:
43
+
return self.spoiler
44
+
45
+
def get_languages(self) -> list[str]:
46
+
return self.language
47
+
48
+
def is_sensitive(self) -> bool:
49
+
return self.sensitive or (self.spoiler is not None and self.spoiler != "")
50
+
51
+
def get_post_url(self) -> str | None:
52
+
return self.url
-109
mastodon/info.py
-109
mastodon/info.py
···
1
-
from abc import ABC, abstractmethod
2
-
from dataclasses import dataclass
3
-
from typing import Any
4
-
5
-
import requests
6
-
7
-
from cross.service import Service
8
-
from util.util import normalize_service_url
9
-
10
-
11
-
def validate_and_transform(data: dict[str, Any]):
12
-
if "token" not in data or "instance" not in data:
13
-
raise KeyError("Missing required values 'token' or 'instance'")
14
-
15
-
data["instance"] = normalize_service_url(data["instance"])
16
-
17
-
18
-
@dataclass(kw_only=True)
19
-
class InstanceInfo:
20
-
max_characters: int = 500
21
-
max_media_attachments: int = 4
22
-
characters_reserved_per_url: int = 23
23
-
24
-
image_size_limit: int = 16777216
25
-
video_size_limit: int = 103809024
26
-
27
-
text_format: str = "text/plain"
28
-
29
-
@classmethod
30
-
def from_api(cls, data: dict[str, Any]) -> "InstanceInfo":
31
-
config: dict[str, Any] = {}
32
-
33
-
if "statuses" in data:
34
-
statuses_config: dict[str, Any] = data.get("statuses", {})
35
-
if "max_characters" in statuses_config:
36
-
config["max_characters"] = statuses_config["max_characters"]
37
-
if "max_media_attachments" in statuses_config:
38
-
config["max_media_attachments"] = statuses_config[
39
-
"max_media_attachments"
40
-
]
41
-
if "characters_reserved_per_url" in statuses_config:
42
-
config["characters_reserved_per_url"] = statuses_config[
43
-
"characters_reserved_per_url"
44
-
]
45
-
46
-
# glitch content type
47
-
if "supported_mime_types" in statuses_config:
48
-
text_mimes: list[str] = statuses_config["supported_mime_types"]
49
-
50
-
if "text/x.misskeymarkdown" in text_mimes:
51
-
config["text_format"] = "text/x.misskeymarkdown"
52
-
elif "text/markdown" in text_mimes:
53
-
config["text_format"] = "text/markdown"
54
-
55
-
if "media_attachments" in data:
56
-
media_config: dict[str, Any] = data["media_attachments"]
57
-
if "image_size_limit" in media_config:
58
-
config["image_size_limit"] = media_config["image_size_limit"]
59
-
if "video_size_limit" in media_config:
60
-
config["video_size_limit"] = media_config["video_size_limit"]
61
-
62
-
# *oma extensions
63
-
if "max_toot_chars" in data:
64
-
config["max_characters"] = data["max_toot_chars"]
65
-
if "upload_limit" in data:
66
-
config["image_size_limit"] = data["upload_limit"]
67
-
config["video_size_limit"] = data["upload_limit"]
68
-
69
-
if "pleroma" in data:
70
-
pleroma: dict[str, Any] = data["pleroma"]
71
-
if "metadata" in pleroma:
72
-
metadata: dict[str, Any] = pleroma["metadata"]
73
-
if "post_formats" in metadata:
74
-
post_formats: list[str] = metadata["post_formats"]
75
-
76
-
if "text/x.misskeymarkdown" in post_formats:
77
-
config["text_format"] = "text/x.misskeymarkdown"
78
-
elif "text/markdown" in post_formats:
79
-
config["text_format"] = "text/markdown"
80
-
81
-
return InstanceInfo(**config)
82
-
83
-
84
-
class MastodonService(ABC, Service):
85
-
def verify_credentials(self):
86
-
token = self._get_token()
87
-
response = requests.get(
88
-
f"{self.url}/api/v1/accounts/verify_credentials",
89
-
headers={"Authorization": f"Bearer {token}"},
90
-
)
91
-
if response.status_code != 200:
92
-
self.log.error("Failed to validate user credentials!")
93
-
response.raise_for_status()
94
-
return dict(response.json())
95
-
96
-
def fetch_instance_info(self):
97
-
token = self._get_token()
98
-
responce = requests.get(
99
-
f"{self.url}/api/v1/instance",
100
-
headers={"Authorization": f"Bearer {token}"},
101
-
)
102
-
if responce.status_code != 200:
103
-
self.log.error("Failed to get instance info!")
104
-
responce.raise_for_status()
105
-
return dict(responce.json())
106
-
107
-
@abstractmethod
108
-
def _get_token(self) -> str:
109
-
pass
+161
-168
mastodon/input.py
+161
-168
mastodon/input.py
···
1
1
import asyncio
2
2
import json
3
3
import re
4
-
from dataclasses import dataclass, field
5
-
from typing import Any, cast, override
4
+
from typing import Any, Callable
6
5
6
+
import requests
7
7
import websockets
8
8
9
-
from cross.attachments import (
10
-
LabelsAttachment,
11
-
LanguagesAttachment,
12
-
MediaAttachment,
13
-
QuoteAttachment,
14
-
RemoteUrlAttachment,
15
-
SensitiveAttachment,
16
-
)
17
-
from cross.media import Blob, download_blob
18
-
from cross.post import Post
19
-
from cross.service import InputService
20
-
from database.connection import DatabasePool
21
-
from mastodon.info import MastodonService, validate_and_transform
22
-
from mastodon.parser import StatusParser
9
+
import cross
10
+
import util.database as database
11
+
import util.html_util as html_util
12
+
import util.md_util as md_util
13
+
from mastodon.common import MastodonPost
14
+
from util.database import DataBaseWorker
15
+
from util.media import MediaInfo, download_media
16
+
from util.util import LOGGER, as_envvar
23
17
24
-
ALLOWED_VISIBILITY: list[str] = ["public", "unlisted"]
18
+
ALLOWED_VISIBILITY = ["public", "unlisted"]
19
+
MARKDOWNY = ["text/x.misskeymarkdown", "text/markdown", "text/plain"]
25
20
26
21
27
-
@dataclass(kw_only=True)
28
22
class MastodonInputOptions:
29
-
token: str
30
-
instance: str
31
-
allowed_visibility: list[str] = field(
32
-
default_factory=lambda: ALLOWED_VISIBILITY.copy()
33
-
)
34
-
filters: list[re.Pattern[str]] = field(default_factory=lambda: [])
23
+
def __init__(self, o: dict) -> None:
24
+
self.allowed_visibility = ALLOWED_VISIBILITY
25
+
self.filters = [re.compile(f) for f in o.get("regex_filters", [])]
26
+
27
+
allowed_visibility = o.get("allowed_visibility")
28
+
if allowed_visibility is not None:
29
+
if any([v not in ALLOWED_VISIBILITY for v in allowed_visibility]):
30
+
raise ValueError(
31
+
f"'allowed_visibility' only accepts {', '.join(ALLOWED_VISIBILITY)}, got: {allowed_visibility}"
32
+
)
33
+
self.allowed_visibility = allowed_visibility
34
+
35
+
36
+
class MastodonInput(cross.Input):
37
+
def __init__(self, settings: dict, db: DataBaseWorker) -> None:
38
+
self.options = MastodonInputOptions(settings.get("options", {}))
39
+
self.token = as_envvar(settings.get("token")) or (_ for _ in ()).throw(
40
+
ValueError("'token' is required")
41
+
)
42
+
instance: str = as_envvar(settings.get("instance")) or (_ for _ in ()).throw(
43
+
ValueError("'instance' is required")
44
+
)
45
+
46
+
service = instance[:-1] if instance.endswith("/") else instance
47
+
48
+
LOGGER.info("Verifying %s credentails...", service)
49
+
response = requests.get(
50
+
f"{service}/api/v1/accounts/verify_credentials",
51
+
headers={"Authorization": f"Bearer {self.token}"},
52
+
)
53
+
if response.status_code != 200:
54
+
LOGGER.error("Failed to validate user credentials!")
55
+
response.raise_for_status()
56
+
return
35
57
36
-
@classmethod
37
-
def from_dict(cls, data: dict[str, Any]) -> "MastodonInputOptions":
38
-
validate_and_transform(data)
58
+
super().__init__(service, response.json()["id"], settings, db)
59
+
self.streaming = self._get_streaming_url()
39
60
40
-
if "allowed_visibility" in data:
41
-
for vis in data.get("allowed_visibility", []):
42
-
if vis not in ALLOWED_VISIBILITY:
43
-
raise ValueError(f"Invalid visibility option {vis}!")
61
+
if not self.streaming:
62
+
raise Exception("Instance %s does not support streaming!", service)
44
63
45
-
if "filters" in data:
46
-
data["filters"] = [re.compile(r) for r in data["filters"]]
64
+
def _get_streaming_url(self):
65
+
response = requests.get(f"{self.service}/api/v1/instance")
66
+
response.raise_for_status()
67
+
data: dict = response.json()
68
+
return (data.get("urls") or {}).get("streaming_api")
47
69
48
-
return MastodonInputOptions(**data)
70
+
def __to_tokens(self, status: dict):
71
+
content_type = status.get("content_type", "text/plain")
72
+
raw_text = status.get("text")
49
73
74
+
tags: list[str] = []
75
+
for tag in status.get("tags", []):
76
+
tags.append(tag["name"])
50
77
51
-
class MastodonInputService(MastodonService, InputService):
52
-
def __init__(self, db: DatabasePool, options: MastodonInputOptions) -> None:
53
-
super().__init__(options.instance, db)
54
-
self.options: MastodonInputOptions = options
78
+
mentions: list[tuple[str, str]] = []
79
+
for mention in status.get("mentions", []):
80
+
mentions.append(("@" + mention["username"], "@" + mention["acct"]))
55
81
56
-
self.log.info("Verifying %s credentails...", self.url)
57
-
response = self.verify_credentials()
58
-
self.user_id: str = response["id"]
82
+
if raw_text and content_type in MARKDOWNY:
83
+
return md_util.tokenize_markdown(raw_text, tags, mentions)
59
84
60
-
self.log.info("Getting %s configuration...", self.url)
61
-
response = self.fetch_instance_info()
62
-
self.streaming_url: str = response["urls"]["streaming_api"]
85
+
akkoma_ext: dict | None = status.get("akkoma", {}).get("source")
86
+
if akkoma_ext:
87
+
if akkoma_ext.get("mediaType") in MARKDOWNY:
88
+
return md_util.tokenize_markdown(akkoma_ext["content"], tags, mentions)
63
89
64
-
@override
65
-
def _get_token(self) -> str:
66
-
return self.options.token
90
+
tokenizer = html_util.HTMLPostTokenizer()
91
+
tokenizer.mentions = mentions
92
+
tokenizer.tags = tags
93
+
tokenizer.feed(status.get("content", ""))
94
+
return tokenizer.get_tokens()
67
95
68
-
def _on_create_post(self, status: dict[str, Any]):
69
-
if status["account"]["id"] != self.user_id:
96
+
def _on_create_post(self, outputs: list[cross.Output], status: dict):
97
+
# skip events from other users
98
+
if (status.get("account") or {})["id"] != self.user_id:
70
99
return
71
100
72
-
if status["visibility"] not in self.options.allowed_visibility:
101
+
if status.get("visibility") not in self.options.allowed_visibility:
102
+
# Skip f/o and direct posts
103
+
LOGGER.info(
104
+
"Skipping '%s'! '%s' visibility..",
105
+
status["id"],
106
+
status.get("visibility"),
107
+
)
73
108
return
74
109
75
-
reblog: dict[str, Any] | None = status.get("reblog")
76
-
if reblog:
77
-
if reblog["account"]["id"] != self.user_id:
78
-
return
79
-
self._on_reblog(status, reblog)
110
+
# TODO polls not supported on bsky. maybe 3rd party? skip for now
111
+
# we don't handle reblogs. possible with bridgy(?) and self
112
+
# we don't handle quotes.
113
+
if status.get("poll"):
114
+
LOGGER.info("Skipping '%s'! Contains a poll..", status["id"])
80
115
return
81
116
82
-
if status.get("poll"):
83
-
self.log.info("Skipping '%s'! Contains a poll..", status["id"])
117
+
if status.get("quote_id") or status.get("quote"):
118
+
LOGGER.info("Skipping '%s'! Quote..", status["id"])
84
119
return
85
120
86
-
quote: dict[str, Any] | None = status.get("quote")
87
-
if quote:
88
-
quote = quote['quoted_status'] if quote.get('quoted_status') else quote
89
-
if not quote or quote["account"]["id"] != self.user_id:
121
+
reblog: dict | None = status.get("reblog")
122
+
if reblog:
123
+
if (reblog.get("account") or {})["id"] != self.user_id:
124
+
LOGGER.info("Skipping '%s'! Reblog of other user..", status["id"])
90
125
return
91
126
92
-
rquote = self._get_post(self.url, self.user_id, quote['id'])
93
-
if not rquote:
94
-
self.log.info(
95
-
"Skipping %s, parent %s not found in db", status["id"], quote['id']
127
+
success = database.try_insert_repost(
128
+
self.db, status["id"], reblog["id"], self.user_id, self.service
129
+
)
130
+
if not success:
131
+
LOGGER.info(
132
+
"Skipping '%s' as reblogged post was not found in db!", status["id"]
96
133
)
97
134
return
135
+
136
+
for output in outputs:
137
+
output.accept_repost(status["id"], reblog["id"])
138
+
return
98
139
99
140
in_reply: str | None = status.get("in_reply_to_id")
100
141
in_reply_to: str | None = status.get("in_reply_to_account_id")
101
142
if in_reply_to and in_reply_to != self.user_id:
143
+
# We don't support replies.
144
+
LOGGER.info("Skipping '%s'! Reply to other user..", status["id"])
102
145
return
103
146
104
-
parent = None
105
-
if in_reply:
106
-
parent = self._get_post(self.url, self.user_id, in_reply)
107
-
if not parent:
108
-
self.log.info(
109
-
"Skipping %s, parent %s not found in db", status["id"], in_reply
110
-
)
111
-
return
112
-
parser = StatusParser(status)
113
-
parser.feed(status["content"])
114
-
tokens = parser.get_result()
115
-
116
-
post = Post(id=status["id"], parent_id=in_reply, tokens=tokens)
117
-
118
-
if quote:
119
-
post.attachments.put(QuoteAttachment(quoted_id=quote['id'], quoted_user=self.user_id))
120
-
if status.get("url"):
121
-
post.attachments.put(RemoteUrlAttachment(url=status["url"]))
122
-
if status.get("sensitive"):
123
-
post.attachments.put(SensitiveAttachment(sensitive=True))
124
-
if status.get("language"):
125
-
post.attachments.put(LanguagesAttachment(langs=[status["language"]]))
126
-
if status.get("spoiler"):
127
-
post.attachments.put(LabelsAttachment(labels=[status["spoiler"]]))
128
-
129
-
blobs: list[Blob] = []
130
-
for media in status.get("media_attachments", []):
131
-
self.log.info("Downloading %s...", media["url"])
132
-
blob: Blob | None = download_blob(media["url"], media.get("alt"))
133
-
if not blob:
134
-
self.log.error(
135
-
"Skipping %s! Failed to download media %s.",
136
-
status["id"],
137
-
media["url"],
138
-
)
139
-
return
140
-
blobs.append(blob)
141
-
142
-
if blobs:
143
-
post.attachments.put(MediaAttachment(blobs=blobs))
144
-
145
-
if parent:
146
-
self._insert_post(
147
-
{
148
-
"user": self.user_id,
149
-
"service": self.url,
150
-
"identifier": status["id"],
151
-
"parent": parent["id"],
152
-
"root": parent["id"] if not parent["root"] else parent["root"],
153
-
}
154
-
)
155
-
else:
156
-
self._insert_post(
157
-
{
158
-
"user": self.user_id,
159
-
"service": self.url,
160
-
"identifier": status["id"],
161
-
}
147
+
success = database.try_insert_post(
148
+
self.db, status["id"], in_reply, self.user_id, self.service
149
+
)
150
+
if not success:
151
+
LOGGER.info(
152
+
"Skipping '%s' as parent post was not found in db!", status["id"]
162
153
)
154
+
return
163
155
164
-
for out in self.outputs:
165
-
self.submitter(lambda: out.accept_post(post))
166
-
167
-
def _on_reblog(self, status: dict[str, Any], reblog: dict[str, Any]):
168
-
reposted = self._get_post(self.url, self.user_id, reblog["id"])
169
-
if not reposted:
170
-
self.log.info(
171
-
"Skipping repost '%s' as reposted post '%s' was not found in the db.",
172
-
status["id"],
173
-
reblog["id"],
174
-
)
156
+
tokens = self.__to_tokens(status)
157
+
if not cross.test_filters(tokens, self.options.filters):
158
+
LOGGER.info("Skipping '%s'. Matched a filter!", status["id"])
175
159
return
176
160
177
-
self._insert_post(
178
-
{
179
-
"user": self.user_id,
180
-
"service": self.url,
181
-
"identifier": status["id"],
182
-
"reposted": reposted["id"],
183
-
}
184
-
)
161
+
LOGGER.info("Crossposting '%s'...", status["id"])
162
+
163
+
media_attachments: list[MediaInfo] = []
164
+
for attachment in status.get("media_attachments", []):
165
+
LOGGER.info("Downloading %s...", attachment["url"])
166
+
info = download_media(
167
+
attachment["url"], attachment.get("description") or ""
168
+
)
169
+
if not info:
170
+
LOGGER.error("Skipping '%s'. Failed to download media!", status["id"])
171
+
return
172
+
media_attachments.append(info)
185
173
186
-
for out in self.outputs:
187
-
self.submitter(lambda: out.accept_repost(status["id"], reblog["id"]))
174
+
cross_post = MastodonPost(status, tokens, media_attachments)
175
+
for output in outputs:
176
+
output.accept_post(cross_post)
188
177
189
-
def _on_delete_post(self, status_id: str):
190
-
post = self._get_post(self.url, self.user_id, status_id)
178
+
def _on_delete_post(self, outputs: list[cross.Output], identifier: str):
179
+
post = database.find_post(self.db, identifier, self.user_id, self.service)
191
180
if not post:
192
181
return
193
182
183
+
LOGGER.info("Deleting '%s'...", identifier)
194
184
if post["reposted_id"]:
195
-
for output in self.outputs:
196
-
self.submitter(lambda: output.delete_repost(status_id))
185
+
for output in outputs:
186
+
output.delete_repost(identifier)
197
187
else:
198
-
for output in self.outputs:
199
-
self.submitter(lambda: output.delete_post(status_id))
200
-
self._delete_post_by_id(post["id"])
188
+
for output in outputs:
189
+
output.delete_post(identifier)
201
190
202
-
def _accept_msg(self, msg: websockets.Data) -> None:
203
-
data: dict[str, Any] = cast(dict[str, Any], json.loads(msg))
204
-
event: str = cast(str, data["event"])
205
-
payload: str = cast(str, data["payload"])
191
+
database.delete_post(self.db, identifier, self.user_id, self.service)
206
192
207
-
if event == "update":
208
-
self._on_create_post(json.loads(payload))
209
-
elif event == "delete":
210
-
self._on_delete_post(payload)
193
+
def _on_post(self, outputs: list[cross.Output], event: str, payload: str):
194
+
match event:
195
+
case "update":
196
+
self._on_create_post(outputs, json.loads(payload))
197
+
case "delete":
198
+
self._on_delete_post(outputs, payload)
211
199
212
-
@override
213
-
async def listen(self):
214
-
url = f"{self.streaming_url}/api/v1/streaming?stream=user"
200
+
async def listen(
201
+
self, outputs: list[cross.Output], submit: Callable[[Callable[[], Any]], Any]
202
+
):
203
+
uri = f"{self.streaming}/api/v1/streaming?stream=user&access_token={self.token}"
215
204
216
205
async for ws in websockets.connect(
217
-
url, additional_headers={"Authorization": f"Bearer {self.options.token}"}
206
+
uri, extra_headers={"User-Agent": "XPost/0.0.3"}
218
207
):
219
208
try:
220
-
self.log.info("Listening to %s...", self.streaming_url)
209
+
LOGGER.info("Listening to %s...", self.streaming)
221
210
222
211
async def listen_for_messages():
223
212
async for msg in ws:
224
-
self.submitter(lambda: self._accept_msg(msg))
213
+
data = json.loads(msg)
214
+
event: str = data.get("event")
215
+
payload: str = data.get("payload")
216
+
217
+
submit(lambda event=event, payload=payload: self._on_post(outputs, str(event), str(payload)))
225
218
226
219
listen = asyncio.create_task(listen_for_messages())
227
220
228
-
_ = await asyncio.gather(listen)
221
+
await asyncio.gather(listen)
229
222
except websockets.ConnectionClosedError as e:
230
-
self.log.error(e, stack_info=True, exc_info=True)
231
-
self.log.info("Reconnecting to %s...", self.streaming_url)
223
+
LOGGER.error(e, stack_info=True, exc_info=True)
224
+
LOGGER.info("Reconnecting to %s...", self.streaming)
232
225
continue
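
The `async for ws in websockets.connect(...)` loop above leans on a `websockets` library feature: iterating over `connect()` yields a fresh connection whenever the previous one ends, with backoff between attempts, so `continue` after a `ConnectionClosedError` resumes streaming. The pattern in isolation (the URI is a placeholder):

```python
import asyncio
import websockets

async def consume(uri: str):
    # iterating connect() opens a new connection whenever the previous
    # one ends, with the library's built-in backoff between attempts
    async for ws in websockets.connect(uri):
        try:
            async for msg in ws:
                print(msg)
        except websockets.ConnectionClosedError:
            continue  # transient drop: let the iterator reconnect

# asyncio.run(consume("wss://streaming.example/api/v1/streaming?stream=user"))
```
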
+388
-118
mastodon/output.py
+388
-118
mastodon/output.py
···
1
-
from dataclasses import dataclass
2
-
from typing import Any, override
1
+
import time
3
2
4
3
import requests
5
4
6
-
from cross.attachments import (
7
-
LanguagesAttachment,
8
-
QuoteAttachment,
9
-
RemoteUrlAttachment,
10
-
SensitiveAttachment,
11
-
)
12
-
from cross.post import Post
13
-
from cross.service import OutputService
14
-
from database.connection import DatabasePool
15
-
from mastodon.info import InstanceInfo, MastodonService, validate_and_transform
5
+
import cross
6
+
import misskey.mfm_util as mfm_util
7
+
import util.database as database
8
+
from util.database import DataBaseWorker
9
+
from util.media import MediaInfo
10
+
from util.util import LOGGER, as_envvar, canonical_label
16
11
17
-
ALLOWED_POSTING_VISIBILITY: list[str] = ["public", "unlisted", "private"]
12
+
POSSIBLE_MIMES = [
13
+
"audio/ogg",
14
+
"audio/mp3",
15
+
"image/webp",
16
+
"image/jpeg",
17
+
"image/png",
18
+
"video/mp4",
19
+
"video/quicktime",
20
+
"video/webm",
21
+
]
18
22
23
+
TEXT_MIMES = ["text/x.misskeymarkdown", "text/markdown", "text/plain"]
19
24
20
-
@dataclass(kw_only=True)
25
+
ALLOWED_POSTING_VISIBILITY = ["public", "unlisted", "private"]
26
+
27
+
21
28
class MastodonOutputOptions:
22
-
token: str
23
-
instance: str
24
-
visibility: str = "public"
29
+
def __init__(self, o: dict) -> None:
30
+
self.visibility = "public"
31
+
32
+
visibility = o.get("visibility")
33
+
if visibility is not None:
34
+
if visibility not in ALLOWED_POSTING_VISIBILITY:
35
+
raise ValueError(
36
+
f"'visibility' only accepts {', '.join(ALLOWED_POSTING_VISIBILITY)}, got: {visibility}"
37
+
)
38
+
self.visibility = visibility
39
+
40
+
41
+
class MastodonOutput(cross.Output):
42
+
def __init__(self, input: cross.Input, settings: dict, db: DataBaseWorker) -> None:
43
+
super().__init__(input, settings, db)
44
+
self.options = settings.get("options") or {}
45
+
self.token = as_envvar(settings.get("token")) or (_ for _ in ()).throw(
46
+
ValueError("'token' is required")
47
+
)
48
+
instance: str = as_envvar(settings.get("instance")) or (_ for _ in ()).throw(
49
+
ValueError("'instance' is required")
50
+
)
51
+
52
+
self.service = instance[:-1] if instance.endswith("/") else instance
53
+
54
+
LOGGER.info("Verifying %s credentails...", self.service)
55
+
response = requests.get(
56
+
f"{self.service}/api/v1/accounts/verify_credentials",
57
+
headers={"Authorization": f"Bearer {self.token}"},
58
+
)
59
+
if response.status_code != 200:
60
+
LOGGER.error("Failed to validate user credentials!")
61
+
response.raise_for_status()
62
+
return
63
+
self.user_id: str = response.json()["id"]
64
+
65
+
LOGGER.info("Getting %s configuration...", self.service)
66
+
response = requests.get(
67
+
f"{self.service}/api/v1/instance",
68
+
headers={"Authorization": f"Bearer {self.token}"},
69
+
)
70
+
if response.status_code != 200:
71
+
LOGGER.error("Failed to get instance info!")
72
+
response.raise_for_status()
73
+
return
74
+
75
+
instance_info: dict = response.json()
76
+
configuration: dict = instance_info["configuration"]
77
+
78
+
statuses_config: dict = configuration.get("statuses", {})
79
+
self.max_characters: int = statuses_config.get("max_characters", 500)
80
+
self.max_media_attachments: int = statuses_config.get(
81
+
"max_media_attachments", 4
82
+
)
83
+
self.characters_reserved_per_url: int = statuses_config.get(
84
+
"characters_reserved_per_url", 23
85
+
)
86
+
87
+
media_config: dict = configuration.get("media_attachments", {})
88
+
self.image_size_limit: int = media_config.get("image_size_limit", 16777216)
89
+
self.video_size_limit: int = media_config.get("video_size_limit", 103809024)
90
+
self.supported_mime_types: list[str] = media_config.get(
91
+
"supported_mime_types", POSSIBLE_MIMES
92
+
)
93
+
94
+
# *oma: max post chars
95
+
max_toot_chars = instance_info.get("max_toot_chars")
96
+
if max_toot_chars:
97
+
self.max_characters: int = max_toot_chars
98
+
99
+
# *oma: max upload limit
100
+
upload_limit = instance_info.get("upload_limit")
101
+
if upload_limit:
102
+
self.image_size_limit: int = upload_limit
103
+
self.video_size_limit: int = upload_limit
104
+
105
+
# chuckya: supported text types
106
+
chuckya_text_mimes: list[str] = statuses_config.get("supported_mime_types", [])
107
+
self.text_format = next(
108
+
(mime for mime in TEXT_MIMES if mime in (chuckya_text_mimes)), "text/plain"
109
+
)
110
+
111
+
# *oma ext: supported text types
112
+
pleroma = instance_info.get("pleroma")
113
+
if pleroma:
114
+
post_formats: list[str] = pleroma.get("metadata", {}).get(
115
+
"post_formats", []
116
+
)
117
+
self.text_format = next(
118
+
(mime for mime in TEXT_MIMES if mime in post_formats), self.text_format
119
+
)
120
+
121
+
def upload_media(self, attachments: list[MediaInfo]) -> list[str] | None:
122
+
for a in attachments:
123
+
if a.mime.startswith("image/") and len(a.io) > self.image_size_limit:
124
+
return None
25
125
26
-
@classmethod
27
-
def from_dict(cls, data: dict[str, Any]) -> "MastodonOutputOptions":
28
-
validate_and_transform(data)
126
+
if a.mime.startswith("video/") and len(a.io) > self.video_size_limit:
127
+
return None
29
128
30
-
if "visibility" in data:
31
-
if data["visibility"] not in ALLOWED_POSTING_VISIBILITY:
32
-
raise ValueError(f"Invalid visibility option {data['visibility']}!")
129
+
if not a.mime.startswith("image/") and not a.mime.startswith("video/"):
130
+
if len(a.io) > 7_000_000:
131
+
return None
33
132
34
-
return MastodonOutputOptions(**data)
133
+
uploads: list[dict] = []
134
+
for a in attachments:
135
+
data = {}
136
+
if a.alt:
137
+
data["description"] = a.alt
35
138
139
+
req = requests.post(
140
+
f"{self.service}/api/v2/media",
141
+
headers={"Authorization": f"Bearer {self.token}"},
142
+
files={"file": (a.name, a.io, a.mime)},
143
+
data=data,
144
+
)
36
145
37
-
# TODO
38
-
class MastodonOutputService(MastodonService, OutputService):
39
-
def __init__(self, db: DatabasePool, options: MastodonOutputOptions) -> None:
40
-
super().__init__(options.instance, db)
41
-
self.options: MastodonOutputOptions = options
146
+
if req.status_code == 200:
147
+
LOGGER.info("Uploaded %s! (%s)", a.name, req.json()["id"])
148
+
uploads.append({"done": True, "id": req.json()["id"]})
149
+
elif req.status_code == 202:
150
+
LOGGER.info("Waiting for %s to process!", a.name)
151
+
uploads.append({"done": False, "id": req.json()["id"]})
152
+
else:
153
+
LOGGER.error("Failed to upload %s! %s", a.name, req.text)
154
+
req.raise_for_status()
42
155
43
-
self.log.info("Verifying %s credentails...", self.url)
44
-
response = self.verify_credentials()
45
-
self.user_id: str = response["id"]
156
+
while any([not val["done"] for val in uploads]):
157
+
LOGGER.info("Waiting for media to process...")
158
+
time.sleep(3)
159
+
for media in uploads:
160
+
if media["done"]:
161
+
continue
46
162
47
-
self.log.info("Getting %s configuration...", self.url)
48
-
response = self.fetch_instance_info()
49
-
self.instance_info: InstanceInfo = InstanceInfo.from_api(response)
163
+
reqs = requests.get(
164
+
f"{self.service}/api/v1/media/{media['id']}",
165
+
headers={"Authorization": f"Bearer {self.token}"},
166
+
)
50
167
51
-
def accept_post(self, service: str, user: str, post: Post):
168
+
if reqs.status_code == 206:
169
+
continue
170
+
171
+
if reqs.status_code == 200:
172
+
media["done"] = True
173
+
continue
174
+
reqs.raise_for_status()
175
+
176
+
return [val["id"] for val in uploads]
177
+
178
+
def token_to_string(self, tokens: list[cross.Token]) -> str | None:
179
+
p_text: str = ""
180
+
181
+
for token in tokens:
182
+
if isinstance(token, cross.TextToken):
183
+
p_text += token.text
184
+
elif isinstance(token, cross.TagToken):
185
+
p_text += "#" + token.tag
186
+
elif isinstance(token, cross.LinkToken):
187
+
if canonical_label(token.label, token.href):
188
+
p_text += token.href
189
+
else:
190
+
if self.text_format == "text/plain":
191
+
p_text += f"{token.label} ({token.href})"
192
+
elif self.text_format in {
193
+
"text/x.misskeymarkdown",
194
+
"text/markdown",
195
+
}:
196
+
p_text += f"[{token.label}]({token.href})"
197
+
else:
198
+
return None
199
+
200
+
return p_text
201
+
202
+
def split_tokens_media(self, tokens: list[cross.Token], media: list[MediaInfo]):
203
+
split_tokens = cross.split_tokens(
204
+
tokens, self.max_characters, self.characters_reserved_per_url
205
+
)
206
+
post_text: list[str] = []
207
+
208
+
for block in split_tokens:
209
+
baked_text = self.token_to_string(block)
210
+
211
+
if baked_text is None:
212
+
return None
213
+
post_text.append(baked_text)
214
+
215
+
if not post_text:
216
+
post_text = [""]
217
+
218
+
posts: list[dict] = [
219
+
{"text": post_text, "attachments": []} for post_text in post_text
220
+
]
221
+
available_indices: list[int] = list(range(len(posts)))
222
+
223
+
current_image_post_idx: int | None = None
224
+
225
+
def make_blank_post() -> dict:
226
+
return {"text": "", "attachments": []}
227
+
228
+
def pop_next_empty_index() -> int:
229
+
if available_indices:
230
+
return available_indices.pop(0)
231
+
else:
232
+
new_idx = len(posts)
233
+
posts.append(make_blank_post())
234
+
return new_idx
235
+
236
+
for att in media:
237
+
if (
238
+
current_image_post_idx is not None
239
+
and len(posts[current_image_post_idx]["attachments"])
240
+
< self.max_media_attachments
241
+
):
242
+
posts[current_image_post_idx]["attachments"].append(att)
243
+
else:
244
+
idx = pop_next_empty_index()
245
+
posts[idx]["attachments"].append(att)
246
+
current_image_post_idx = idx
247
+
248
+
result: list[tuple[str, list[MediaInfo]]] = []
249
+
250
+
for p in posts:
251
+
result.append((p["text"], p["attachments"]))
252
+
253
+
return result
254
+
255
+
def accept_post(self, post: cross.Post):
256
+
parent_id = post.get_parent_id()
257
+
52
258
new_root_id: int | None = None
53
259
new_parent_id: int | None = None
54
260
55
261
reply_ref: str | None = None
56
-
if post.parent_id:
57
-
thread = self._find_mapped_thread(
58
-
post.parent_id, service, user, self.url, self.user_id
262
+
if parent_id:
263
+
thread_tuple = database.find_mapped_thread(
264
+
self.db,
265
+
parent_id,
266
+
self.input.user_id,
267
+
self.input.service,
268
+
self.user_id,
269
+
self.service,
59
270
)
60
271
61
-
if not thread:
62
-
self.log.error("Failed to find thread tuple in the database!")
63
-
return
64
-
_, reply_ref, new_root_id, new_parent_id = thread
272
+
if not thread_tuple:
273
+
LOGGER.error("Failed to find thread tuple in the database!")
274
+
return None
275
+
276
+
_, reply_ref, new_root_id, new_parent_id = thread_tuple
277
+
278
+
lang: str
279
+
if post.get_languages():
280
+
lang = post.get_languages()[0]
281
+
else:
282
+
lang = "en"
283
+
284
+
post_tokens = post.get_tokens()
285
+
if post.get_text_type() == "text/x.misskeymarkdown":
286
+
post_tokens, status = mfm_util.strip_mfm(post_tokens)
287
+
post_url = post.get_post_url()
288
+
if status and post_url:
289
+
post_tokens.append(cross.TextToken("\n"))
290
+
post_tokens.append(
291
+
cross.LinkToken(post_url, "[Post contains MFM, see original]")
292
+
)
293
+
294
+
raw_statuses = self.split_tokens_media(post_tokens, post.get_attachments())
295
+
if not raw_statuses:
296
+
LOGGER.error("Failed to split post into statuses?")
297
+
return None
298
+
baked_statuses = []
299
+
300
+
for status, raw_media in raw_statuses:
301
+
media: list[str] | None = None
302
+
if raw_media:
303
+
media = self.upload_media(raw_media)
304
+
if not media:
305
+
LOGGER.error("Failed to upload attachments!")
306
+
return None
307
+
baked_statuses.append((status, media))
308
+
continue
309
+
baked_statuses.append((status, []))
65
310
66
-
quote = post.attachments.get(QuoteAttachment)
67
-
if quote:
68
-
if quote.quoted_user != user:
69
-
self.log.info("Quoted other user, skipping!")
70
-
return
311
+
created_statuses: list[str] = []
312
+
313
+
for status, media in baked_statuses:
314
+
payload = {
315
+
"status": status,
316
+
"media_ids": media or [],
317
+
"spoiler_text": post.get_spoiler() or "",
318
+
"visibility": self.options.get("visibility", "public"),
319
+
"content_type": self.text_format,
320
+
"language": lang,
321
+
}
322
+
323
+
if media:
324
+
payload["sensitive"] = post.is_sensitive()
325
+
326
+
if post.get_spoiler():
327
+
payload["sensitive"] = True
71
328
72
-
quoted_post = self._get_post(service, user, quote.quoted_id)
73
-
if not quoted_post:
74
-
self.log.error("Failed to find quoted post in the database!")
75
-
return
329
+
if not status:
330
+
payload["status"] = "๐ผ๏ธ"
76
331
77
-
quoted_mappings = self._get_mappings(quoted_post["id"], self.url, self.user_id)
78
-
if not quoted_mappings:
79
-
self.log.error("Failed to find mappings for quoted post!")
80
-
return
332
+
if reply_ref:
333
+
payload["in_reply_to_id"] = reply_ref
81
334
82
-
quoted_local_id = quoted_mappings[-1][0]
83
-
# TODO resolve service identifier
335
+
reqs = requests.post(
336
+
f"{self.service}/api/v1/statuses",
337
+
headers={
338
+
"Authorization": f"Bearer {self.token}",
339
+
"Content-Type": "application/json",
340
+
},
341
+
json=payload,
342
+
)
84
343
85
-
post_tokens = post.tokens.copy()
344
+
if reqs.status_code != 200:
345
+
LOGGER.info(
346
+
"Failed to post status! %s - %s", reqs.status_code, reqs.text
347
+
)
348
+
reqs.raise_for_status()
86
349
87
-
remote_url = post.attachments.get(RemoteUrlAttachment)
88
-
if remote_url and remote_url.url and post.text_type == "text/x.misskeymarkdown":
89
-
# TODO stip mfm
90
-
pass
350
+
reply_ref = reqs.json()["id"]
351
+
LOGGER.info("Created new status %s!", reply_ref)
91
352
92
-
raw_statuses = [] # TODO split tokens and media across posts
93
-
if not raw_statuses:
94
-
self.log.error("Failed to split post into statuses!")
95
-
return
353
+
created_statuses.append(reqs.json()["id"])
96
354
97
-
langs = post.attachments.get(LanguagesAttachment)
98
-
sensitive = post.attachments.get(SensitiveAttachment)
355
+
db_post = database.find_post(
356
+
self.db, post.get_id(), self.input.user_id, self.input.service
357
+
)
358
+
assert db_post, "ghghghhhhh"
99
359
100
-
if langs and langs.langs:
101
-
pass # TODO
360
+
if new_root_id is None or new_parent_id is None:
361
+
new_root_id = database.insert_post(
362
+
self.db, created_statuses[0], self.user_id, self.service
363
+
)
364
+
new_parent_id = new_root_id
365
+
database.insert_mapping(self.db, db_post["id"], new_parent_id)
366
+
created_statuses = created_statuses[1:]
102
367
103
-
if sensitive and sensitive.sensitive:
104
-
pass # TODO
368
+
for db_id in created_statuses:
369
+
new_parent_id = database.insert_reply(
370
+
self.db, db_id, self.user_id, self.service, new_parent_id, new_root_id
371
+
)
372
+
database.insert_mapping(self.db, db_post["id"], new_parent_id)
105
373
106
-
def delete_post(self, service: str, user: str, post_id: str):
107
-
post = self._get_post(service, user, post_id)
374
+
def delete_post(self, identifier: str):
375
+
post = database.find_post(
376
+
self.db, identifier, self.input.user_id, self.input.service
377
+
)
108
378
if not post:
109
-
self.log.info("Post not found in db, skipping delete..")
110
379
return
111
380
112
-
mappings = self._get_mappings(post["id"], self.url, self.user_id)
381
+
mappings = database.find_mappings(
382
+
self.db, post["id"], self.service, self.user_id
383
+
)
113
384
for mapping in mappings[::-1]:
114
-
self.log.info("Deleting '%s'...", mapping["identifier"])
385
+
LOGGER.info("Deleting '%s'...", mapping[0])
115
386
requests.delete(
116
-
f"{self.url}/api/v1/statuses/{mapping['identifier']}",
117
-
headers={"Authorization": f"Bearer {self._get_token()}"},
387
+
f"{self.service}/api/v1/statuses/{mapping[0]}",
388
+
headers={"Authorization": f"Bearer {self.token}"},
118
389
)
119
-
self._delete_post_by_id(mapping["id"])
390
+
database.delete_post(self.db, mapping[0], self.service, self.user_id)
391
+
392
+
def accept_repost(self, repost_id: str, reposted_id: str):
393
+
repost = self.__delete_repost(repost_id)
394
+
if not repost:
395
+
return None
120
396
121
-
def accept_repost(self, service: str, user: str, repost_id: str, reposted_id: str):
122
-
reposted = self._get_post(service, user, reposted_id)
397
+
reposted = database.find_post(
398
+
self.db, reposted_id, self.input.user_id, self.input.service
399
+
)
123
400
if not reposted:
124
-
self.log.info("Post not found in db, skipping repost..")
125
401
return
126
402
127
-
mappings = self._get_mappings(reposted["id"], self.url, self.user_id)
403
+
mappings = database.find_mappings(
404
+
self.db, reposted["id"], self.service, self.user_id
405
+
)
128
406
if mappings:
129
407
rsp = requests.post(
130
-
f"{self.url}/api/v1/statuses/{mappings[0]['identifier']}/reblog",
131
-
headers={"Authorization": f"Bearer {self._get_token()}"},
408
+
f"{self.service}/api/v1/statuses/{mappings[0][0]}/reblog",
409
+
headers={"Authorization": f"Bearer {self.token}"},
132
410
)
133
411
134
412
if rsp.status_code != 200:
135
-
self.log.error(
413
+
LOGGER.error(
136
414
"Failed to boost status! status_code: %s, msg: %s",
137
415
rsp.status_code,
138
416
rsp.content,
139
417
)
140
418
return
141
419
142
-
self._insert_post(
143
-
{
144
-
"user": self.user_id,
145
-
"service": self.url,
146
-
"identifier": rsp.json()["id"],
147
-
"reposted": mappings[0]["id"],
148
-
}
420
+
internal_id = database.insert_repost(
421
+
self.db, rsp.json()["id"], reposted["id"], self.user_id, self.service
149
422
)
150
-
inserted = self._get_post(self.url, self.user_id, rsp.json()["id"])
151
-
if not inserted:
152
-
raise ValueError("Inserted post not found!")
153
-
self._insert_post_mapping(reposted["id"], inserted["id"])
423
+
database.insert_mapping(self.db, repost["id"], internal_id)
154
424
155
-
def delete_repost(self, service: str, user: str, repost_id: str):
156
-
repost = self._get_post(service, user, repost_id)
425
+
def __delete_repost(self, repost_id: str) -> dict | None:
426
+
repost = database.find_post(
427
+
self.db, repost_id, self.input.user_id, self.input.service
428
+
)
157
429
if not repost:
158
-
self.log.info("Repost not found in db, skipping delete..")
159
-
return
430
+
return None
160
431
161
-
mappings = self._get_mappings(repost["id"], self.url, self.user_id)
162
-
rmappings = self._get_mappings(repost["reposted"], self.url, self.user_id)
163
-
164
-
if mappings and rmappings:
165
-
self.log.info(
166
-
"Removing '%s' Repost of '%s'...",
167
-
mappings[0]["identifier"],
168
-
rmappings[0]["identifier"],
169
-
)
432
+
mappings = database.find_mappings(
433
+
self.db, repost["id"], self.service, self.user_id
434
+
)
435
+
reposted_mappings = database.find_mappings(
436
+
self.db, repost["reposted_id"], self.service, self.user_id
437
+
)
438
+
if mappings and reposted_mappings:
439
+
LOGGER.info("Deleting '%s'...", mappings[0][0])
170
440
requests.post(
171
-
f"{self.url}/api/v1/statuses/{rmappings[0]['identifier']}/unreblog",
172
-
headers={"Authorization": f"Bearer {self._get_token()}"},
441
+
f"{self.service}/api/v1/statuses/{reposted_mappings[0][0]}/unreblog",
442
+
headers={"Authorization": f"Bearer {self.token}"},
173
443
)
174
-
self._delete_post_by_id(mappings[0]["id"])
444
+
database.delete_post(self.db, mappings[0][0], self.user_id, self.service)
445
+
return repost
175
446
176
-
@override
177
-
def _get_token(self) -> str:
178
-
return self.options.token
447
+
def delete_repost(self, repost_id: str):
448
+
self.__delete_repost(repost_id)
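`split_tokens_media` above packs attachments greedily: media fills the current status up to `max_media_attachments`, then spills into the next text-only status, and finally into fresh blank statuses appended at the end. A distilled, standalone sketch of that spreading logic (plain strings stand in for `MediaInfo`; `spread_attachments` and `max_per_post` are hypothetical names for illustration, not repository code):

```python
# Standalone sketch of the attachment-spreading logic used by
# MastodonOutput.split_tokens_media. "max_per_post" plays the role of
# max_media_attachments; strings stand in for MediaInfo objects.
def spread_attachments(texts: list[str], media: list[str], max_per_post: int = 4):
    posts = [{"text": t, "attachments": []} for t in texts]
    available = list(range(len(posts)))  # posts that have not received media yet
    current = None  # index of the post currently being filled

    for att in media:
        if current is not None and len(posts[current]["attachments"]) < max_per_post:
            posts[current]["attachments"].append(att)
        else:
            # take the next text-only post, or append a blank one
            current = available.pop(0) if available else len(posts)
            if current == len(posts):
                posts.append({"text": "", "attachments": []})
            posts[current]["attachments"].append(att)

    return [(p["text"], p["attachments"]) for p in posts]


# Ten images against a two-status thread, four per status:
# the overflow spills into a third, text-less status.
print(spread_attachments(["part 1", "part 2"], [f"img{i}" for i in range(10)]))
```

The text-less overflow statuses are the ones that later receive the `🖼️` placeholder body, since Mastodon rejects statuses with neither text nor spoiler.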
-31 mastodon/parser.py
···
-from typing import Any, override
-
-from cross.tokens import LinkToken, MentionToken, TagToken
-from util.html import HTMLToTokensParser
-
-
-class StatusParser(HTMLToTokensParser):
-    def __init__(self, status: dict[str, Any]) -> None:
-        super().__init__()
-        self.tags: set[str] = set(tag["url"] for tag in status.get("tags", []))
-        self.mentions: set[str] = set(m["url"] for m in status.get("mentions", []))
-
-    @override
-    def handle_a_endtag(self):
-        label, _attr = self._tag_stack.pop("a")
-
-        href = _attr.get("href")
-        if href:
-            cls = _attr.get("class", "")
-            if cls:
-                if "hashtag" in cls and href in self.tags:
-                    tag = label[1:] if label.startswith("#") else label
-
-                    self.tokens.append(TagToken(tag=tag))
-                    return
-                if "mention" in cls and href in self.mentions:
-                    username = label[1:] if label.startswith("@") else label
-
-                    self.tokens.append(MentionToken(username=username, uri=href))
-                    return
-            self.tokens.append(LinkToken(href=href, label=label))
-21 migrations/001_initdb_v1.py
···
-import sqlite3
-
-
-def migrate(conn: sqlite3.Connection):
-    _ = conn.execute("""
-        CREATE TABLE IF NOT EXISTS posts (
-            id INTEGER PRIMARY KEY AUTOINCREMENT,
-            user_id TEXT NOT NULL,
-            service TEXT NOT NULL,
-            identifier TEXT NOT NULL,
-            parent_id INTEGER NULL REFERENCES posts(id) ON DELETE SET NULL,
-            root_id INTEGER NULL REFERENCES posts(id) ON DELETE SET NULL
-        );
-    """)
-    _ = conn.execute("""
-        CREATE TABLE IF NOT EXISTS mappings (
-            original_post_id INTEGER NOT NULL REFERENCES posts(id) ON DELETE CASCADE,
-            mapped_post_id INTEGER NOT NULL
-        );
-    """)
-    pass
-11 migrations/002_add_reposted_column_v1.py
···
-import sqlite3
-
-
-def migrate(conn: sqlite3.Connection):
-    columns = conn.execute("PRAGMA table_info(posts)")
-    column_names = [col[1] for col in columns]
-    if "reposted_id" not in column_names:
-        _ = conn.execute("""
-            ALTER TABLE posts
-            ADD COLUMN reposted_id INTEGER NULL REFERENCES posts(id) ON DELETE SET NULL
-        """)
-22 migrations/003_add_extra_data_column_v1.py
···
-import json
-import sqlite3
-
-
-def migrate(conn: sqlite3.Connection):
-    columns = conn.execute("PRAGMA table_info(posts)")
-    column_names = [col[1] for col in columns]
-    if "extra_data" not in column_names:
-        _ = conn.execute("""
-            ALTER TABLE posts
-            ADD COLUMN extra_data TEXT NULL
-        """)
-
-    # migrate old bsky identifiers from json to uri as id and cid in extra_data
-    data = conn.execute("SELECT id, identifier FROM posts WHERE service = 'https://bsky.app';").fetchall()
-    rewrites: list[tuple[str, str, int]] = []
-    for row in data:
-        if row[1][0] == '{' and row[1][-1] == '}':
-            data = json.loads(row[1])
-            rewrites.append((data['uri'], json.dumps({'cid': data['cid']}), row[0]))
-    if rewrites:
-        _ = conn.executemany("UPDATE posts SET identifier = ?, extra_data = ? WHERE id = ?;", rewrites)
-52 migrations/004_initdb_next.py
···
-import sqlite3
-
-
-def migrate(conn: sqlite3.Connection):
-    cursor = conn.cursor()
-
-    old_posts = cursor.execute("SELECT * FROM posts;").fetchall()
-    old_mappings = cursor.execute("SELECT * FROM mappings;").fetchall()
-
-    _ = cursor.execute("DROP TABLE posts;")
-    _ = cursor.execute("DROP TABLE mappings;")
-
-    _ = cursor.execute("""
-        CREATE TABLE posts (
-            id INTEGER UNIQUE PRIMARY KEY AUTOINCREMENT,
-            user TEXT NOT NULL,
-            service TEXT NOT NULL,
-            identifier TEXT NOT NULL,
-            parent INTEGER NULL REFERENCES posts(id),
-            root INTEGER NULL REFERENCES posts(id),
-            reposted INTEGER NULL REFERENCES posts(id),
-            extra_data TEXT NULL
-        );
-    """)
-
-    _ = cursor.execute("""
-        CREATE TABLE mappings (
-            original INTEGER NOT NULL REFERENCES posts(id) ON DELETE CASCADE,
-            mapped INTEGER NOT NULL REFERENCES posts(id) ON DELETE CASCADE,
-            UNIQUE(original, mapped)
-        );
-    """)
-
-    for old_post in old_posts:
-        _ = cursor.execute(
-            """
-            INSERT INTO posts (id, user, service, identifier, parent, root, reposted, extra_data)
-            VALUES (:id, :user_id, :service, :identifier, :parent_id, :root_id, :reposted_id, :extra_data)
-            """,
-            dict(old_post),
-        )
-
-    for mapping in old_mappings:
-        original, mapped = mapping["original_post_id"], mapping["mapped_post_id"]
-        _ = cursor.execute(
-            "INSERT OR IGNORE INTO mappings (original, mapped) VALUES (?, ?)",
-            (original, mapped),
-        )
-        _ = cursor.execute(
-            "INSERT OR IGNORE INTO mappings (original, mapped) VALUES (?, ?)",
-            (mapped, original),
-        )
-12 migrations/005_add_indexes.py
···
-import sqlite3
-
-
-def migrate(conn: sqlite3.Connection):
-    _ = conn.execute("""
-        CREATE INDEX IF NOT EXISTS idx_posts_service_user_identifier
-        ON posts (service, user, identifier);
-    """)
-    _ = conn.execute("""
-        CREATE UNIQUE INDEX IF NOT EXISTS ux_mappings_original_mapped
-        ON mappings (original, mapped);
-    """)
-35 migrations/_registry.py
···
-import importlib.util
-from pathlib import Path
-import sqlite3
-from typing import Callable
-
-
-def load_migrations(path: Path) -> list[tuple[int, str, Callable[[sqlite3.Connection], None]]]:
-    migrations: list[tuple[int, str, Callable[[sqlite3.Connection], None]]] = []
-    migration_files = sorted(
-        [f for f in path.glob("*.py") if not f.stem.startswith("_")]
-    )
-
-    for filepath in migration_files:
-        filename = filepath.stem
-        version_str = filename.split("_")[0]
-
-        try:
-            version = int(version_str)
-        except ValueError:
-            raise ValueError('migrations must start with a number!!')
-
-        spec = importlib.util.spec_from_file_location(filepath.stem, filepath)
-        if not spec or not spec.loader:
-            raise Exception(f"Failed to load spec from file: {filepath}")
-
-        module = importlib.util.module_from_spec(spec)
-        spec.loader.exec_module(module)
-
-        if hasattr(module, "migrate"):
-            migrations.append((version, filename, module.migrate))
-        else:
-            raise ValueError(f"Migration {filepath.name} missing 'migrate' function")
-
-    migrations.sort(key=lambda x: x[0])
-    return migrations
+54 misskey/common.py
···
+import cross
+from util.media import MediaInfo
+
+
+class MisskeyPost(cross.Post):
+    def __init__(
+        self,
+        instance_url: str,
+        note: dict,
+        tokens: list[cross.Token],
+        files: list[MediaInfo],
+    ) -> None:
+        super().__init__()
+        self.note = note
+        self.id = note["id"]
+        self.parent_id = note.get("replyId")
+        self.tokens = tokens
+        self.timestamp = note["createdAt"]
+        self.media_attachments = files
+        self.spoiler = note.get("cw")
+        self.sensitive = any(
+            [a.get("isSensitive", False) for a in note.get("files", [])]
+        )
+        self.url = instance_url + "/notes/" + note["id"]
+
+    def get_id(self) -> str:
+        return self.id
+
+    def get_parent_id(self) -> str | None:
+        return self.parent_id
+
+    def get_tokens(self) -> list[cross.Token]:
+        return self.tokens
+
+    def get_text_type(self) -> str:
+        return "text/x.misskeymarkdown"
+
+    def get_timestamp(self) -> str:
+        return self.timestamp
+
+    def get_attachments(self) -> list[MediaInfo]:
+        return self.media_attachments
+
+    def get_spoiler(self) -> str | None:
+        return self.spoiler
+
+    def get_languages(self) -> list[str]:
+        return []
+
+    def is_sensitive(self) -> bool:
+        return self.sensitive or (self.spoiler is not None and self.spoiler != "")
+
+    def get_post_url(self) -> str | None:
+        return self.url
-22 misskey/info.py
···
-from abc import ABC, abstractmethod
-
-import requests
-
-from cross.service import Service
-
-
-class MisskeyService(ABC, Service):
-    def verify_credentials(self):
-        response = requests.post(
-            f"{self.url}/api/i",
-            json={"i": self._get_token()},
-            headers={"Content-Type": "application/json"},
-        )
-        if response.status_code != 200:
-            self.log.error("Failed to validate user credentials!")
-            response.raise_for_status()
-        return dict(response.json())
-
-    @abstractmethod
-    def _get_token(self) -> str:
-        pass
+126 -151 misskey/input.py
···
 import json
 import re
 import uuid
-from dataclasses import dataclass, field
-from typing import Any, cast, override
+from typing import Any, Callable
 
+import requests
 import websockets
 
-from cross.attachments import (
-    LabelsAttachment,
-    MediaAttachment,
-    QuoteAttachment,
-    RemoteUrlAttachment,
-    SensitiveAttachment,
-)
-from cross.media import Blob, download_blob
-from cross.post import Post
-from cross.service import InputService
-from database.connection import DatabasePool
-from misskey.info import MisskeyService
-from util.markdown import MarkdownParser
-from util.util import normalize_service_url
+import cross
+import util.database as database
+import util.md_util as md_util
+from misskey.common import MisskeyPost
+from util.media import MediaInfo, download_media
+from util.util import LOGGER, as_envvar
 
 ALLOWED_VISIBILITY = ["public", "home"]
 
 
-@dataclass
 class MisskeyInputOptions:
-    token: str
-    instance: str
-    allowed_visibility: list[str] = field(
-        default_factory=lambda: ALLOWED_VISIBILITY.copy()
-    )
-    filters: list[re.Pattern[str]] = field(default_factory=lambda: [])
+    def __init__(self, o: dict) -> None:
+        self.allowed_visibility = ALLOWED_VISIBILITY
+        self.filters = [re.compile(f) for f in o.get("regex_filters", [])]
 
-    @classmethod
-    def from_dict(cls, data: dict[str, Any]) -> "MisskeyInputOptions":
-        data["instance"] = normalize_service_url(data["instance"])
+        allowed_visibility = o.get("allowed_visibility")
+        if allowed_visibility is not None:
+            if any([v not in ALLOWED_VISIBILITY for v in allowed_visibility]):
+                raise ValueError(
+                    f"'allowed_visibility' only accepts {', '.join(ALLOWED_VISIBILITY)}, got: {allowed_visibility}"
+                )
+            self.allowed_visibility = allowed_visibility
 
-        if "allowed_visibility" in data:
-            for vis in data.get("allowed_visibility", []):
-                if vis not in ALLOWED_VISIBILITY:
-                    raise ValueError(f"Invalid visibility option {vis}!")
 
-        if "filters" in data:
-            data["filters"] = [re.compile(r) for r in data["filters"]]
+class MisskeyInput(cross.Input):
+    def __init__(self, settings: dict, db: cross.DataBaseWorker) -> None:
+        self.options = MisskeyInputOptions(settings.get("options", {}))
+        self.token = as_envvar(settings.get("token")) or (_ for _ in ()).throw(
+            ValueError("'token' is required")
+        )
+        instance: str = as_envvar(settings.get("instance")) or (_ for _ in ()).throw(
+            ValueError("'instance' is required")
+        )
 
-        return MisskeyInputOptions(**data)
+        service = instance[:-1] if instance.endswith("/") else instance
 
-
-class MisskeyInputService(MisskeyService, InputService):
-    def __init__(self, db: DatabasePool, options: MisskeyInputOptions) -> None:
-        super().__init__(options.instance, db)
-        self.options: MisskeyInputOptions = options
-
-        self.log.info("Verifying %s credentails...", self.url)
-        response = self.verify_credentials()
-        self.user_id: str = response["id"]
+        LOGGER.info("Verifying %s credentails...", service)
+        responce = requests.post(
+            f"{instance}/api/i",
+            json={"i": self.token},
+            headers={"Content-Type": "application/json"},
+        )
+        if responce.status_code != 200:
+            LOGGER.error("Failed to validate user credentials!")
+            responce.raise_for_status()
+            return
 
-    @override
-    def _get_token(self) -> str:
-        return self.options.token
+        super().__init__(service, responce.json()["id"], settings, db)
 
-    def _on_note(self, note: dict[str, Any]):
+    def _on_note(self, outputs: list[cross.Output], note: dict):
         if note["userId"] != self.user_id:
             return
 
-        if note["visibility"] not in self.options.allowed_visibility:
+        if note.get("visibility") not in self.options.allowed_visibility:
+            LOGGER.info(
+                "Skipping '%s'! '%s' visibility..", note["id"], note.get("visibility")
+            )
             return
 
+        # TODO polls not supported on bsky. maybe 3rd party? skip for now
+        # we don't handle reblogs. possible with bridgy(?) and self
         if note.get("poll"):
-            self.log.info("Skipping '%s'! Contains a poll..", note["id"])
+            LOGGER.info("Skipping '%s'! Contains a poll..", note["id"])
             return
 
-        renote: dict[str, Any] | None = note.get("renote")
+        renote: dict | None = note.get("renote")
         if renote:
-            if note.get("text") is None:
-                self._on_renote(note, renote)
+            if note.get("text") is not None:
+                LOGGER.info("Skipping '%s'! Quote..", note["id"])
                 return
 
-            if renote["userId"] != self.user_id:
+            if renote.get("userId") != self.user_id:
+                LOGGER.info("Skipping '%s'! Reblog of other user..", note["id"])
                 return
 
-            rrenote = self._get_post(self.url, self.user_id, renote["id"])
-            if not rrenote:
-                self.log.info(
-                    "Skipping %s, quote %s not found in db", note["id"], renote["id"]
+            success = database.try_insert_repost(
+                self.db, note["id"], renote["id"], self.user_id, self.service
+            )
+            if not success:
+                LOGGER.info(
+                    "Skipping '%s' as renoted note was not found in db!", note["id"]
                 )
                 return
 
-        reply: dict[str, Any] | None = note.get("reply")
-        if reply:
-            if reply.get("userId") != self.user_id:
-                self.log.info("Skipping '%s'! Reply to other user..", note["id"])
+            for output in outputs:
+                output.accept_repost(note["id"], renote["id"])
+            return
+
+        reply_id: str | None = note.get("replyId")
+        if reply_id:
+            if note.get("reply", {}).get("userId") != self.user_id:
+                LOGGER.info("Skipping '%s'! Reply to other user..", note["id"])
                 return
 
-        parent = None
-        if reply:
-            parent = self._get_post(self.url, self.user_id, reply["id"])
-            if not parent:
-                self.log.info(
-                    "Skipping %s, parent %s not found in db", note["id"], reply["id"]
-                )
-                return
+            success = database.try_insert_post(
+                self.db, note["id"], reply_id, self.user_id, self.service
+            )
+            if not success:
+                LOGGER.info("Skipping '%s' as parent note was not found in db!", note["id"])
+                return
 
         mention_handles: dict = note.get("mentionHandles") or {}
         tags: list[str] = note.get("tags") or []
···
         for key, value in mention_handles.items():
             handles.append((value, value))
 
-        parser = MarkdownParser() # TODO MFM parser
-        tokens = parser.parse(note.get("text", ""), tags, handles)
-        post = Post(id=note["id"], parent_id=reply["id"] if reply else None, tokens=tokens)
+        tokens = md_util.tokenize_markdown(note.get("text", ""), tags, handles)
+        if not cross.test_filters(tokens, self.options.filters):
+            LOGGER.info("Skipping '%s'. Matched a filter!", note["id"])
+            return
 
-        post.attachments.put(RemoteUrlAttachment(url=self.url + "/notes/" + note["id"]))
-        if renote:
-            post.attachments.put(QuoteAttachment(quoted_id=renote['id'], quoted_user=self.user_id))
-        if any([a.get("isSensitive", False) for a in note.get("files", [])]):
-            post.attachments.put(SensitiveAttachment(sensitive=True))
-        if note.get("cw"):
-            post.attachments.put(LabelsAttachment(labels=[note["cw"]]))
+        LOGGER.info("Crossposting '%s'...", note["id"])
 
-        blobs: list[Blob] = []
-        for media in note.get("files", []):
-            self.log.info("Downloading %s...", media["url"])
-            blob: Blob | None = download_blob(media["url"], media.get("comment", ""))
-            if not blob:
-                self.log.error(
-                    "Skipping %s! Failed to download media %s.",
-                    note["id"],
-                    media["url"],
-                )
+        media_attachments: list[MediaInfo] = []
+        for attachment in note.get("files", []):
+            LOGGER.info("Downloading %s...", attachment["url"])
+            info = download_media(attachment["url"], attachment.get("comment") or "")
+            if not info:
+                LOGGER.error("Skipping '%s'. Failed to download media!", note["id"])
                 return
-            blobs.append(blob)
-
-        if blobs:
-            post.attachments.put(MediaAttachment(blobs=blobs))
-
-        if parent:
-            self._insert_post(
-                {
-                    "user": self.user_id,
-                    "service": self.url,
-                    "identifier": note["id"],
-                    "parent": parent["id"],
-                    "root": parent["id"] if not parent["root"] else parent["root"],
-                }
-            )
-        else:
-            self._insert_post(
-                {
-                    "user": self.user_id,
-                    "service": self.url,
-                    "identifier": note["id"],
-                }
-            )
-
-        for out in self.outputs:
-            self.submitter(lambda: out.accept_post(post))
-
-    def _on_renote(self, note: dict[str, Any], renote: dict[str, Any]):
-        reposted = self._get_post(self.url, self.user_id, renote["id"])
-        if not reposted:
-            self.log.info(
-                "Skipping repost '%s' as reposted post '%s' was not found in the db.",
-                note["id"],
-                renote["id"],
-            )
-            return
-
-        self._insert_post(
-            {
-                "user": self.user_id,
-                "service": self.url,
-                "identifier": note["id"],
-                "reposted": reposted["id"],
-            }
-        )
+            media_attachments.append(info)
 
-        for out in self.outputs:
-            self.submitter(lambda: out.accept_repost(note["id"], renote["id"]))
+        cross_post = MisskeyPost(self.service, note, tokens, media_attachments)
+        for output in outputs:
+            output.accept_post(cross_post)
 
-    def _accept_msg(self, msg: websockets.Data) -> None:
-        data: dict[str, Any] = cast(dict[str, Any], json.loads(msg))
+    def _on_delete(self, outputs: list[cross.Output], note: dict):
+        # TODO handle deletes
+        pass
 
+    def _on_message(self, outputs: list[cross.Output], data: dict):
         if data["type"] == "channel":
-            type: str = cast(str, data["body"]["type"])
+            type: str = data["body"]["type"]
             if type == "note" or type == "reply":
                 note_body = data["body"]["body"]
-                self._on_note(note_body)
+                self._on_note(outputs, note_body)
                 return
 
-    async def _subscribe_to_home(self, ws: websockets.ClientConnection) -> None:
+        pass
+
+    async def _send_keepalive(self, ws: websockets.WebSocketClientProtocol):
+        while ws.open:
+            try:
+                await asyncio.sleep(120)
+                if ws.open:
+                    await ws.send("h")
+                    LOGGER.debug("Sent keepalive h..")
+                else:
+                    LOGGER.info("WebSocket is closed, stopping keepalive task.")
+                    break
+            except Exception as e:
+                LOGGER.error(f"Error sending keepalive: {e}")
+                break
+
+    async def _subscribe_to_home(self, ws: websockets.WebSocketClientProtocol):
         await ws.send(
             json.dumps(
                 {
···
                 }
             )
         )
-        self.log.info("Subscribed to 'homeTimeline' channel...")
+        LOGGER.info("Subscribed to 'homeTimeline' channel...")
 
-    @override
-    async def listen(self):
-        streaming: str = f"{'wss' if self.url.startswith('https') else 'ws'}://{self.url.split('://', 1)[1]}"
-        url: str = f"{streaming}/streaming?i={self.options.token}"
+    async def listen(
+        self, outputs: list[cross.Output], submit: Callable[[Callable[[], Any]], Any]
+    ):
+        streaming: str = f"wss://{self.service.split('://', 1)[1]}"
+        url: str = f"{streaming}/streaming?i={self.token}"
 
-        async for ws in websockets.connect(url):
+        async for ws in websockets.connect(
+            url, extra_headers={"User-Agent": "XPost/0.0.3"}
+        ):
             try:
-                self.log.info("Listening to %s...", streaming)
+                LOGGER.info("Listening to %s...", streaming)
                 await self._subscribe_to_home(ws)
 
                 async def listen_for_messages():
                     async for msg in ws:
-                        self.submitter(lambda: self._accept_msg(msg))
+                        # TODO listen to deletes somehow
+                        submit(lambda: self._on_message(outputs, json.loads(msg)))
 
+                keepalive = asyncio.create_task(self._send_keepalive(ws))
                 listen = asyncio.create_task(listen_for_messages())
 
-                _ = await asyncio.gather(listen)
+                await asyncio.gather(keepalive, listen)
             except websockets.ConnectionClosedError as e:
-                self.log.error(e, stack_info=True, exc_info=True)
-                self.log.info("Reconnecting to %s...", streaming)
+                LOGGER.error(e, stack_info=True, exc_info=True)
+                LOGGER.info("Reconnecting to %s...", streaming)
                 continue
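Both new constructors above use the empty-generator trick `(_ for _ in ()).throw(...)` so that a missing setting raises from inside an `or` expression instead of needing an `if` block. A minimal sketch of the idiom (the `require` helper is hypothetical, not part of the codebase):

```python
# Calling .throw() on a never-started generator raises the given exception
# at the call site, which lets `value or <raise>` act as "value or die".
def require(value, name: str):
    return value or (_ for _ in ()).throw(ValueError(f"{name!r} is required"))

print(require("abc", "token"))  # -> abc
require(None, "token")          # -> raises ValueError: 'token' is required
```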
+38 misskey/mfm_util.py
···
+import re
+
+import cross
+
+MFM_PATTERN = re.compile(r"\$\[([^\[\]]+)\]")
+
+
+def strip_mfm(tokens: list[cross.Token]) -> tuple[list[cross.Token], bool]:
+    modified = False
+
+    for tk in tokens:
+        if isinstance(tk, cross.TextToken):
+            original = tk.text
+            cleaned = __strip_mfm(original)
+            if cleaned != original:
+                modified = True
+                tk.text = cleaned
+
+        elif isinstance(tk, cross.LinkToken):
+            original = tk.label
+            cleaned = __strip_mfm(original)
+            if cleaned != original:
+                modified = True
+                tk.label = cleaned
+
+    return tokens, modified
+
+
+def __strip_mfm(text: str) -> str:
+    def match_contents(match: re.Match[str]):
+        content = match.group(1).strip()
+        parts = content.split(" ", 1)
+        return parts[1] if len(parts) > 1 else ""
+
+    while MFM_PATTERN.search(text):
+        text = MFM_PATTERN.sub(match_contents, text)
+
+    return text
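Because `[^\[\]]+` cannot cross a bracket, the pattern only ever matches innermost `$[name args]` spans, so the `while` loop peels nested markup one layer per pass. A self-contained rerun of the same regex logic:

```python
# Same pattern and replacement rule as misskey/mfm_util.py: keep only the
# text after the first space inside each $[...] span, repeating until no
# MFM markup remains.
import re

MFM_PATTERN = re.compile(r"\$\[([^\[\]]+)\]")

def strip(text: str) -> str:
    def inner(match: re.Match[str]) -> str:
        parts = match.group(1).strip().split(" ", 1)
        return parts[1] if len(parts) > 1 else ""
    while MFM_PATTERN.search(text):
        text = MFM_PATTERN.sub(inner, text)
    return text

print(strip("$[tada $[sparkle hello]] world"))  # -> "hello world"
```

The `modified` flag that `strip_mfm` returns is what triggers the "[Post contains MFM, see original]" link appended in `mastodon/output.py`.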
+6 -14 pyproject.toml
···
 [project]
 name = "xpost"
-version = "0.1.0"
-description = "social media crossposting tool"
+version = "0.0.3"
+description = "mastodon -> bluesky crossposting tool"
 readme = "README.md"
 requires-python = ">=3.12"
 dependencies = [
-    "dnspython>=2.8.0",
-    "grapheme>=0.6.0",
+    "atproto>=0.0.61",
+    "click>=8.2.1",
     "python-magic>=0.4.27",
-    "requests>=2.32.5",
-    "websockets>=15.0.1",
-]
-
-[dependency-groups]
-dev = [
-    "pytest>=8.4.2",
+    "requests>=2.32.3",
+    "websockets>=13.1",
 ]
-
-[tool.pytest.ini_options]
-pythonpath = ["."]
-32 registry.py
···
-from pathlib import Path
-from typing import Any, Callable
-
-from cross.service import InputService, OutputService
-from database.connection import DatabasePool
-
-input_factories: dict[str, Callable[[DatabasePool, dict[str, Any]], InputService]] = {}
-output_factories: dict[str, Callable[[DatabasePool, dict[str, Any]], OutputService]] = {}
-
-
-def create_input_service(db: DatabasePool, data: dict[str, Any]) -> InputService:
-    if "type" not in data:
-        raise ValueError("No `type` field in input data!")
-    type: str = str(data["type"])
-    del data["type"]
-
-    factory = input_factories.get(type)
-    if not factory:
-        raise KeyError(f"No such input service {type}!")
-    return factory(db, data)
-
-
-def create_output_service(db: DatabasePool, data: dict[str, Any]) -> OutputService:
-    if "type" not in data:
-        raise ValueError("No `type` field in input data!")
-    type: str = str(data["type"])
-    del data["type"]
-
-    factory = output_factories.get(type)
-    if not factory:
-        raise KeyError(f"No such output service {type}!")
-    return factory(db, data)
-33 registry_bootstrap.py
···
-from typing import Any
-
-from database.connection import DatabasePool
-from registry import input_factories, output_factories
-
-
-class LazyFactory:
-    def __init__(self, module_path: str, class_name: str, options_class_name: str):
-        self.module_path: str = module_path
-        self.class_name: str = class_name
-        self.options_class_name: str = options_class_name
-
-    def __call__(self, db: DatabasePool, d: dict[str, Any]):
-        module = __import__(
-            self.module_path, fromlist=[self.class_name, self.options_class_name]
-        )
-        service_class = getattr(module, self.class_name)
-        options_class = getattr(module, self.options_class_name)
-        return service_class(db, options_class.from_dict(d))
-
-
-def bootstrap():
-    input_factories["mastodon-wss"] = LazyFactory(
-        "mastodon.input", "MastodonInputService", "MastodonInputOptions"
-    )
-    input_factories["misskey-wss"] = LazyFactory(
-        "misskey.input", "MisskeyInputService", "MisskeyInputOptions"
-    )
-    input_factories["bluesky-jetstream"] = LazyFactory(
-        "bluesky.input", "BlueskyJetstreamInputService", "BlueskyJetstreamInputOptions"
-    )
-    output_factories['stderr'] = LazyFactory(
-        "util.dummy", "StderrOutputService", "DummyOptions"
-    )
-61 tests/util/util_test.py
···
-import util.util as u
-from unittest.mock import patch
-import pytest
-
-
-def test_normalize_service_url_http():
-    assert u.normalize_service_url("http://example.com") == "http://example.com"
-    assert u.normalize_service_url("http://example.com/") == "http://example.com"
-
-
-def test_normalize_service_url_invalid_schemes():
-    with pytest.raises(ValueError, match="Invalid service url"):
-        _ = u.normalize_service_url("ftp://example.com")
-    with pytest.raises(ValueError, match="Invalid service url"):
-        _ = u.normalize_service_url("example.com")
-    with pytest.raises(ValueError, match="Invalid service url"):
-        _ = u.normalize_service_url("//example.com")
-
-
-def test_read_env_missing_env_var():
-    data = {"token": "env:MISSING_VAR", "keep": "value"}
-    with patch.dict("os.environ", {}, clear=True):
-        u.read_env(data)
-    assert data == {"keep": "value"}
-    assert "token" not in data
-
-
-def test_read_env_no_env_prefix():
-    data = {"token": "literal_value", "number": 123}
-    u.read_env(data)
-    assert data == {"token": "literal_value", "number": 123}
-
-
-def test_read_env_deeply_nested():
-    data = {"level1": {"level2": {"token": "env:DEEP_TOKEN"}}}
-    with patch.dict("os.environ", {"DEEP_TOKEN": "deep_secret"}):
-        u.read_env(data)
-    assert data["level1"]["level2"]["token"] == "deep_secret"
-
-
-def test_read_env_mixed_types():
-    data = {
-        "string": "env:TOKEN",
-        "number": 42,
-        "list": [1, 2, 3],
-        "none": None,
-        "bool": True,
-    }
-    with patch.dict("os.environ", {"TOKEN": "secret"}):
-        u.read_env(data)
-    assert data["string"] == "secret"
-    assert data["number"] == 42
-    assert data["list"] == [1, 2, 3]
-    assert data["none"] is None
-    assert data["bool"] is True
-
-
-def test_read_env_empty_dict():
-    data = {}
-    u.read_env(data)
-    assert data == {}
-49 util/cache.py
···
-from abc import ABC, abstractmethod
-from pathlib import Path
-import time
-from typing import Generic, TypeVar, override
-import pickle
-
-K = TypeVar("K")
-V = TypeVar("V")
-
-class Cacheable(ABC):
-    @abstractmethod
-    def dump_cache(self, path: Path):
-        pass
-
-    @abstractmethod
-    def load_cache(self, path: Path):
-        pass
-
-class TTLCache(Generic[K, V], Cacheable):
-    def __init__(self, ttl_seconds: int = 3600) -> None:
-        self.ttl: int = ttl_seconds
-        self.__cache: dict[K, tuple[V, float]] = {}
-
-    def get(self, key: K) -> V | None:
-        if key in self.__cache:
-            value, timestamp = self.__cache[key]
-            if time.time() - timestamp < self.ttl:
-                return value
-            else:
-                del self.__cache[key]
-        return None
-
-    def set(self, key: K, value: V) -> None:
-        self.__cache[key] = (value, time.time())
-
-    def clear(self) -> None:
-        self.__cache.clear()
-
-    @override
-    def dump_cache(self, path: Path) -> None:
-        path.parent.mkdir(parents=True, exist_ok=True)
-        with open(path, 'wb') as f:
-            pickle.dump(self.__cache, f)
-
-    @override
-    def load_cache(self, path: Path):
-        if path.exists():
-            with open(path, 'rb') as f:
-                self.__cache = pickle.load(f)
+290 util/database.py
···
+import json
+import queue
+import sqlite3
+import threading
+from concurrent.futures import Future
+
+
+class DataBaseWorker:
+    def __init__(self, database: str) -> None:
+        super(DataBaseWorker, self).__init__()
+        self.database = database
+        self.queue = queue.Queue()
+        self.thread = threading.Thread(target=self._run, daemon=True)
+        self.shutdown_event = threading.Event()
+        self.conn = sqlite3.connect(self.database, check_same_thread=False)
+        self.lock = threading.Lock()
+        self.thread.start()
+
+    def _run(self):
+        while not self.shutdown_event.is_set():
+            try:
+                task, future = self.queue.get(timeout=1)
+                try:
+                    with self.lock:
+                        result = task(self.conn)
+                    future.set_result(result)
+                except Exception as e:
+                    future.set_exception(e)
+                finally:
+                    self.queue.task_done()
+            except queue.Empty:
+                continue
+
+    def execute(self, sql: str, params=()):
+        def task(conn: sqlite3.Connection):
+            cursor = conn.execute(sql, params)
+            conn.commit()
+            return cursor.fetchall()
+
+        future = Future()
+        self.queue.put((task, future))
+        return future.result()
+
+    def close(self):
+        self.shutdown_event.set()
+        self.thread.join()
+        with self.lock:
+            self.conn.close()
+
+
+def try_insert_repost(
+    db: DataBaseWorker,
+    post_id: str,
+    reposted_id: str,
+    input_user: str,
+    input_service: str,
+) -> bool:
+    reposted = find_post(db, reposted_id, input_user, input_service)
+    if not reposted:
+        return False
+
+    insert_repost(db, post_id, reposted["id"], input_user, input_service)
+    return True
+
+
+def try_insert_post(
+    db: DataBaseWorker,
+    post_id: str,
+    in_reply: str | None,
+    input_user: str,
+    input_service: str,
+) -> bool:
+    root_id = None
+    parent_id = None
+
+    if in_reply:
+        parent_post = find_post(db, in_reply, input_user, input_service)
+        if not parent_post:
+            return False
+
+        root_id = parent_post["id"]
+        parent_id = root_id
+        if parent_post["root_id"]:
+            root_id = parent_post["root_id"]
+
+    if root_id and parent_id:
+        insert_reply(db, post_id, input_user, input_service, parent_id, root_id)
+    else:
+        insert_post(db, post_id, input_user, input_service)
+
+    return True
+
+
+def insert_repost(
+    db: DataBaseWorker, identifier: str, reposted_id: int, user_id: str, serivce: str
+) -> int:
+    db.execute(
+        """
+        INSERT INTO posts (user_id, service, identifier, reposted_id)
+        VALUES (?, ?, ?, ?);
+        """,
+        (user_id, serivce, identifier, reposted_id),
+    )
+    return db.execute("SELECT last_insert_rowid();", ())[0][0]
+
+
+def insert_post(db: DataBaseWorker, identifier: str, user_id: str, serivce: str) -> int:
+    db.execute(
+        """
+        INSERT INTO posts (user_id, service, identifier)
+        VALUES (?, ?, ?);
+        """,
+        (user_id, serivce, identifier),
+    )
+    return db.execute("SELECT last_insert_rowid();", ())[0][0]
+
+
+def insert_reply(
+    db: DataBaseWorker,
+    identifier: str,
+    user_id: str,
+    serivce: str,
+    parent: int,
+    root: int,
+) -> int:
+    db.execute(
+        """
+        INSERT INTO posts (user_id, service, identifier, parent_id, root_id)
+        VALUES (?, ?, ?, ?, ?);
+        """,
+        (user_id, serivce, identifier, parent, root),
+    )
+    return db.execute("SELECT last_insert_rowid();", ())[0][0]
+
+
+def insert_mapping(db: DataBaseWorker, original: int, mapped: int):
+    db.execute(
+        """
+        INSERT INTO mappings (original_post_id, mapped_post_id)
+        VALUES (?, ?);
+        """,
+        (original, mapped),
+    )
+
+
+def delete_post(db: DataBaseWorker, identifier: str, user_id: str, serivce: str):
+    db.execute(
+        """
+        DELETE FROM posts
+        WHERE identifier = ?
+        AND service = ?
+        AND user_id = ?
+        """,
+        (identifier, serivce, user_id),
+    )
+
+
+def fetch_data(db: DataBaseWorker, identifier: str, user_id: str, service: str) -> dict:
+    result = db.execute(
+        """
+        SELECT extra_data
+        FROM posts
+        WHERE identifier = ?
+        AND user_id = ?
+        AND service = ?
+        """,
+        (identifier, user_id, service),
+    )
+    if not result or not result[0]:
+        return {}
+    return json.loads(result[0][0])
+
+
+def store_data(
+    db: DataBaseWorker, identifier: str, user_id: str, service: str, extra_data: dict
+) -> None:
+    db.execute(
+        """
+        UPDATE posts
+        SET extra_data = ?
+        WHERE identifier = ?
+        AND user_id = ?
+        AND service = ?
+        """,
+        (json.dumps(extra_data), identifier, user_id, service),
+    )
+
+
+def find_mappings(
+    db: DataBaseWorker, original_post: int, service: str, user_id: str
+) -> list[str]:
+    return db.execute(
+        """
+        SELECT p.identifier
+        FROM posts AS p
+        JOIN mappings AS m
+        ON p.id = m.mapped_post_id
+        WHERE m.original_post_id = ?
+        AND p.service = ?
+        AND p.user_id = ?
+        ORDER BY p.id;
+        """,
+        (original_post, service, user_id),
+    )
+
+
+def find_post_by_id(db: DataBaseWorker, id: int) -> dict | None:
+    result = db.execute(
+        """
+        SELECT user_id, service, identifier, parent_id, root_id, reposted_id
+        FROM posts
+        WHERE id = ?
+        """,
+        (id,),
+    )
+    if not result:
+        return None
+    user_id, service, identifier, parent_id, root_id, reposted_id = result[0]
+    return {
+        "user_id": user_id,
+        "service": service,
+        "identifier": identifier,
+        "parent_id": parent_id,
+        "root_id": root_id,
+        "reposted_id": reposted_id,
+    }
+
+
+def find_post(
+    db: DataBaseWorker, identifier: str, user_id: str, service: str
+) -> dict | None:
+    result = db.execute(
+        """
+        SELECT id, parent_id, root_id, reposted_id
+        FROM posts
+        WHERE identifier = ?
+        AND user_id = ?
+        AND service = ?
+        """,
+        (identifier, user_id, service),
+    )
+    if not result:
+        return None
+    id, parent_id, root_id, reposted_id = result[0]
+    return {
+        "id": id,
+        "parent_id": parent_id,
+        "root_id": root_id,
+        "reposted_id": reposted_id,
+    }
+
+
+def find_mapped_thread(
+    db: DataBaseWorker,
+    parent_id: str,
+    input_user: str,
+    input_service: str,
+    output_user: str,
+    output_service: str,
+):
+    reply_data: dict | None = find_post(db, parent_id, input_user, input_service)
+    if not reply_data:
+        return None
+
+    reply_mappings: list[str] | None = find_mappings(
+        db, reply_data["id"], output_service, output_user
+    )
+    if not reply_mappings:
+        return None
+
+    reply_identifier: str = reply_mappings[-1]
+    root_identifier: str = reply_mappings[0]
+    if reply_data["root_id"]:
+        root_data = find_post_by_id(db, reply_data["root_id"])
+        if not root_data:
+            return None
+
+        root_mappings = find_mappings(
+            db, reply_data["root_id"], output_service, output_user
+        )
+        if not root_mappings:
+            return None
+        root_identifier = root_mappings[0]
+
+    return (
+        root_identifier[0],  # real ids
+        reply_identifier[0],
+        reply_data["root_id"],  # db ids
+        reply_data["id"],
+    )
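Every query funnels through `DataBaseWorker.execute`, which hands the statement to a single worker thread and blocks on a `Future`, so callers stay synchronous while all writes are serialized on one connection. An illustrative session (assumes an in-memory database and a hand-written schema matching the queries above; this snippet is not taken from the repository):

```python
# Illustrative only: exercising DataBaseWorker with tables whose names and
# columns follow the queries in util/database.py.
from util.database import DataBaseWorker
import util.database as database

db = DataBaseWorker(":memory:")
db.execute("""
    CREATE TABLE posts (
        id INTEGER PRIMARY KEY AUTOINCREMENT,
        user_id TEXT NOT NULL,
        service TEXT NOT NULL,
        identifier TEXT NOT NULL,
        parent_id INTEGER,
        root_id INTEGER,
        reposted_id INTEGER,
        extra_data TEXT
    );
""")
db.execute("""
    CREATE TABLE mappings (
        original_post_id INTEGER NOT NULL,
        mapped_post_id INTEGER NOT NULL
    );
""")

# insert_post returns the sqlite rowid, so mappings can be chained directly
original = database.insert_post(db, "note123", "user-a", "https://misskey.example")
mirror = database.insert_post(db, "status456", "user-b", "https://mastodon.example")
database.insert_mapping(db, original, mirror)

print(database.find_post(db, "note123", "user-a", "https://misskey.example"))
# -> {'id': 1, 'parent_id': None, 'root_id': None, 'reposted_id': None}
db.close()
```

Note that `insert_post` and the `SELECT last_insert_rowid()` that follows it are two separate queued tasks, so the returned rowid is only reliable while a single thread issues writes, as in this sketch.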
-29 util/dummy.py
···
-from typing import override
-from cross.post import Post
-from cross.service import OutputService
-from database.connection import DatabasePool
-
-class DummyOptions:
-    @classmethod
-    def from_dict(cls, obj) -> 'DummyOptions':
-        return DummyOptions()
-
-class StderrOutputService(OutputService):
-    def __init__(self, db: DatabasePool, options: DummyOptions) -> None:
-        super().__init__("http://localhost", db)
-
-    @override
-    def accept_post(self, post: Post):
-        self.log.info("%s", post)
-
-    @override
-    def accept_repost(self, repost_id: str, reposted_id: str):
-        self.log.info("%s, %s", repost_id, reposted_id)
-
-    @override
-    def delete_post(self, post_id: str):
-        self.log.info("%s", post_id)
-
-    @override
-    def delete_repost(self, repost_id: str):
-        self.log.info("%s", repost_id)
-150 util/html.py
···
-from html.parser import HTMLParser
-from typing import override
-
-from cross.tokens import LinkToken, TextToken, Token
-from util.splitter import canonical_label
-
-
-class HTMLToTokensParser(HTMLParser):
-    def __init__(self) -> None:
-        super().__init__()
-        self.tokens: list[Token] = []
-
-        self._tag_stack: dict[str, tuple[str, dict[str, str | None]]] = {}
-        self.in_pre: bool = False
-        self.in_code: bool = False
-        self.invisible: bool = False
-
-    def handle_a_endtag(self):
-        label, _attr = self._tag_stack.pop("a")
-
-        href = _attr.get("href")
-        if href:
-            if canonical_label(label, href):
-                self.tokens.append(LinkToken(href=href))
-            else:
-                self.tokens.append(LinkToken(href=href, label=label))
-
-    def append_text(self, text: str):
-        self.tokens.append(TextToken(text=text))
-
-    def append_newline(self):
-        if self.tokens:
-            last_token = self.tokens[-1]
-            if isinstance(last_token, TextToken) and not last_token.text.endswith("\n"):
-                self.tokens.append(TextToken(text="\n"))
-
-    @override
-    def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None:
-        _attr = dict(attrs)
-
-        if self.invisible:
-            return
-
-        match tag:
-            case "p":
-                cls = _attr.get("class", "")
-                if cls and "quote-inline" in cls:
-                    self.invisible = True
-            case "a":
-                self._tag_stack["a"] = ("", _attr)
-            case "code":
-                if not self.in_pre:
-                    self.append_text("`")
-                    self.in_code = True
-            case "pre":
-                self.append_newline()
-                self.append_text("```\n")
-                self.in_pre = True
-            case "blockquote":
-                self.append_newline()
-                self.append_text("> ")
-            case "strong" | "b":
-                self.append_text("**")
-            case "em" | "i":
-                self.append_text("*")
-            case "del" | "s":
-                self.append_text("~~")
-            case "br":
-                self.append_text("\n")
-            case "h1" | "h2" | "h3" | "h4" | "h5" | "h6":
-                level = int(tag[1])
-                self.append_text("\n" + "#" * level + " ")
-            case _:
-                # self.builder.extend(f"<{tag}>".encode("utf-8"))
-                pass
-
-    @override
-    def handle_endtag(self, tag: str) -> None:
-        if self.invisible:
-            if tag == "p":
-                self.invisible = False
-            return
-
-        match tag:
-            case "a":
-                if "a" in self._tag_stack:
-                    self.handle_a_endtag()
-            case "code":
-                if not self.in_pre and self.in_code:
-                    self.append_text("`")
-                    self.in_code = False
-            case "pre":
-                self.append_newline()
-                self.append_text("```\n")
-                self.in_pre = False
-            case "blockquote":
-                self.append_text("\n")
-            case "strong" | "b":
-                self.append_text("**")
-            case "em" | "i":
-                self.append_text("*")
-            case "del" | "s":
-                self.append_text("~~")
-            case "p":
-                self.append_text("\n\n")
-            case "h1" | "h2" | "h3" | "h4" | "h5" | "h6":
-                self.append_text("\n")
-            case _:
-                # self.builder.extend(f"</{tag}>".encode("utf-8"))
-                pass
-
-    @override
-    def handle_data(self, data: str) -> None:
-        if self.invisible:
-            return
-
-        if self._tag_stack.get('a'):
-            label, _attr = self._tag_stack.pop("a")
-            self._tag_stack["a"] = (label + data, _attr)
-            return
-
-    def get_result(self) -> list[Token]:
-        if not self.tokens:
-            return []
-
-        combined: list[Token] = []
-        buffer: list[str] = []
-
-        def flush_buffer():
-            if buffer:
-                merged = "".join(buffer)
-                combined.append(TextToken(text=merged))
-                buffer.clear()
-
-        for token in self.tokens:
-            if isinstance(token, TextToken):
-                buffer.append(token.text)
-            else:
-                flush_buffer()
-                combined.append(token)
-
-        flush_buffer()
-
-        if combined and isinstance(combined[-1], TextToken):
-            if combined[-1].text.endswith("\n\n"):
-                combined[-1] = TextToken(text=combined[-1].text[:-2])
-
-            if combined[-1].text.endswith("\n"):
-                combined[-1] = TextToken(text=combined[-1].text[:-1])
-        return combined
+172
util/html_util.py
···
from html.parser import HTMLParser

import cross


class HTMLPostTokenizer(HTMLParser):
    def __init__(self) -> None:
        super().__init__()
        self.tokens: list[cross.Token] = []

        # populated by the caller; reset(), which HTMLParser.__init__ invokes,
        # gives them empty defaults
        self.mentions: list[tuple[str, str]]
        self.tags: list[str]

        self.in_pre = False
        self.in_code = False

        self.current_tag_stack = []
        self.list_stack = []

        self.anchor_stack = []
        self.anchor_data = []

    def handle_starttag(self, tag: str, attrs: list[tuple[str, str | None]]) -> None:
        attrs_dict = dict(attrs)

        def append_newline():
            if self.tokens:
                last_token = self.tokens[-1]
                if isinstance(
                    last_token, cross.TextToken
                ) and not last_token.text.endswith("\n"):
                    self.tokens.append(cross.TextToken("\n"))

        match tag:
            case "br":
                self.tokens.append(cross.TextToken(" \n"))
            case "a":
                href = attrs_dict.get("href", "")
                self.anchor_stack.append(href)
            # these alternatives must be or-patterns; `case "strong", "b":`
            # is a sequence pattern and never matches a str tag
            case "strong" | "b":
                self.tokens.append(cross.TextToken("**"))
            case "em" | "i":
                self.tokens.append(cross.TextToken("*"))
            case "del" | "s":
                self.tokens.append(cross.TextToken("~~"))
            case "code":
                if not self.in_pre:
                    self.tokens.append(cross.TextToken("`"))
                self.in_code = True
            case "pre":
                append_newline()
                self.tokens.append(cross.TextToken("```\n"))
                self.in_pre = True
            case "blockquote":
                append_newline()
                self.tokens.append(cross.TextToken("> "))
            case "ul" | "ol":
                self.list_stack.append(tag)
                append_newline()
            case "li":
                indent = " " * (len(self.list_stack) - 1)
                if self.list_stack and self.list_stack[-1] == "ul":
                    self.tokens.append(cross.TextToken(f"{indent}- "))
                elif self.list_stack and self.list_stack[-1] == "ol":
                    self.tokens.append(cross.TextToken(f"{indent}1. "))
            case _:
                if tag in {"h1", "h2", "h3", "h4", "h5", "h6"}:
                    level = int(tag[1])
                    self.tokens.append(cross.TextToken("\n" + "#" * level + " "))

        self.current_tag_stack.append(tag)

    def handle_data(self, data: str) -> None:
        if self.anchor_stack:
            self.anchor_data.append(data)
        else:
            self.tokens.append(cross.TextToken(data))

    def handle_endtag(self, tag: str) -> None:
        if not self.current_tag_stack:
            return

        if tag in self.current_tag_stack:
            self.current_tag_stack.remove(tag)

        match tag:
            case "p":
                self.tokens.append(cross.TextToken("\n\n"))
            case "a":
                href = self.anchor_stack.pop()
                anchor_data = "".join(self.anchor_data)
                self.anchor_data = []

                if anchor_data.startswith("#"):
                    as_tag = anchor_data[1:].lower()
                    if any(as_tag == block for block in self.tags):
                        self.tokens.append(cross.TagToken(anchor_data[1:]))
                elif anchor_data.startswith("@"):
                    match = next(
                        (pair for pair in self.mentions if anchor_data in pair), None
                    )

                    if match:
                        self.tokens.append(cross.MentionToken(match[1], ""))
                else:
                    self.tokens.append(cross.LinkToken(href, anchor_data))
            case "strong" | "b":
                self.tokens.append(cross.TextToken("**"))
            case "em" | "i":
                self.tokens.append(cross.TextToken("*"))
            case "del" | "s":
                self.tokens.append(cross.TextToken("~~"))
            case "code":
                if not self.in_pre and self.in_code:
                    self.tokens.append(cross.TextToken("`"))
                    self.in_code = False
            case "pre":
                self.tokens.append(cross.TextToken("\n```\n"))
                self.in_pre = False
            case "blockquote":
                self.tokens.append(cross.TextToken("\n"))
            case "ul" | "ol":
                if self.list_stack:
                    self.list_stack.pop()
                self.tokens.append(cross.TextToken("\n"))
            case "li":
                self.tokens.append(cross.TextToken("\n"))
            case _:
                if tag in ["h1", "h2", "h3", "h4", "h5", "h6"]:
                    self.tokens.append(cross.TextToken("\n"))

    def get_tokens(self) -> list[cross.Token]:
        if not self.tokens:
            return []

        combined: list[cross.Token] = []
        buffer: list[str] = []

        def flush_buffer():
            if buffer:
                merged = "".join(buffer)
                combined.append(cross.TextToken(text=merged))
                buffer.clear()

        for token in self.tokens:
            if isinstance(token, cross.TextToken):
                buffer.append(token.text)
            else:
                flush_buffer()
                combined.append(token)

        flush_buffer()

        if combined and isinstance(combined[-1], cross.TextToken):
            if combined[-1].text.endswith("\n\n"):
                combined[-1] = cross.TextToken(combined[-1].text[:-2])
        return combined

    def reset(self):
        """Reset the parser state for reuse."""
        super().reset()
        self.tokens = []

        self.mentions = []
        self.tags = []

        self.in_pre = False
        self.in_code = False

        self.current_tag_stack = []
        self.anchor_stack = []
        self.list_stack = []
        self.anchor_data = []  # also clear buffered anchor text so a reused parser starts clean
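
As a quick check on the new tokenizer, here is a minimal usage sketch. The sample HTML and the mention/tag lists are invented for illustration; only `HTMLPostTokenizer` and the `cross` token types come from this diff.

```python
# Hypothetical usage; the input HTML, mentions, and tags are made up.
from util.html_util import HTMLPostTokenizer

parser = HTMLPostTokenizer()  # HTMLParser.__init__ calls reset(), so the state lists exist
parser.mentions = [("@alice", "alice@example.com")]  # pairs matched against anchor text
parser.tags = ["python"]  # lowercased hashtags known to belong to the post

parser.feed('<p>hi <strong>world</strong> '
            '<a href="https://example.com/tags/python">#python</a></p>')
parser.close()

tokens = parser.get_tokens()
# adjacent text merges: TextToken("hi **world** "), TagToken("python"), trailing TextToken
```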
-126
util/markdown.py
···
import re

from cross.tokens import LinkToken, MentionToken, TagToken, TextToken, Token
from util.html import HTMLToTokensParser
from util.splitter import canonical_label

URL = re.compile(r"(?:(?:[A-Za-z][A-Za-z0-9+.-]*://)|mailto:)[^\s]+", re.IGNORECASE)
MD_INLINE_LINK = re.compile(
    r"\[([^\]]+)\]\(\s*((?:(?:[A-Za-z][A-Za-z0-9+.\-]*://)|mailto:)[^\s\)]+)\s*\)",
    re.IGNORECASE,
)
MD_AUTOLINK = re.compile(
    r"<((?:(?:[A-Za-z][A-Za-z0-9+.\-]*://)|mailto:)[^\s>]+)>", re.IGNORECASE
)
HASHTAG = re.compile(r"(?<!\w)\#([\w]+)")
FEDIVERSE_HANDLE = re.compile(r"(?<![\w@])@([\w\.-]+)(?:@([\w\.-]+\.[\w\.-]+))?")

REGEXES = [URL, MD_INLINE_LINK, MD_AUTOLINK, HASHTAG, FEDIVERSE_HANDLE]


# TODO autolinks are broken by the html parser
class MarkdownParser:
    def parse(
        self, text: str, tags: list[str], handles: list[tuple[str, str]]
    ) -> list[Token]:
        if not text:
            return []

        tokenizer = HTMLToTokensParser()
        tokenizer.feed(text)
        html_tokens = tokenizer.get_result()

        tokens: list[Token] = []

        for tk in html_tokens:
            if isinstance(tk, TextToken):
                tokens.extend(self.__tokenize_md(tk.text, tags, handles))
            elif isinstance(tk, LinkToken):
                if not tk.label or canonical_label(tk.label, tk.href):
                    tokens.append(tk)
                    continue

                tokens.extend(
                    self.__tokenize_md(f"[{tk.label}]({tk.href})", tags, handles)
                )
            else:
                tokens.append(tk)

        return tokens

    def __tokenize_md(
        self, text: str, tags: list[str], handles: list[tuple[str, str]]
    ) -> list[Token]:
        index: int = 0
        total: int = len(text)
        buffer: list[str] = []

        tokens: list[Token] = []

        def flush():
            nonlocal buffer
            if buffer:
                tokens.append(TextToken(text="".join(buffer)))
                buffer = []

        while index < total:
            if text[index] == "[":
                md_inline = MD_INLINE_LINK.match(text, index)
                if md_inline:
                    flush()
                    label = md_inline.group(1)
                    href = md_inline.group(2)
                    tokens.append(LinkToken(href=href, label=label))
                    index = md_inline.end()
                    continue

            if text[index] == "<":
                md_auto = MD_AUTOLINK.match(text, index)
                if md_auto:
                    flush()
                    href = md_auto.group(1)
                    tokens.append(LinkToken(href=href, label=None))
                    index = md_auto.end()
                    continue

            if text[index] == "#":
                tag = HASHTAG.match(text, index)
                if tag:
                    tag_text = tag.group(1)
                    if tag_text.lower() in tags:
                        flush()
                        tokens.append(TagToken(tag=tag_text))
                        index = tag.end()
                        continue

            if text[index] == "@":
                handle = FEDIVERSE_HANDLE.match(text, index)
                if handle:
                    handle_text = handle.group(0)
                    stripped_handle = handle_text.strip()

                    match = next(
                        (pair for pair in handles if stripped_handle in pair), None
                    )

                    if match:
                        flush()
                        tokens.append(
                            MentionToken(username=match[1], uri=None)
                        )  # TODO: misskey doesn't provide a uri
                        index = handle.end()
                        continue

            url = URL.match(text, index)
            if url:
                flush()
                href = url.group(0)
                tokens.append(LinkToken(href=href, label=None))
                index = url.end()
                continue

            buffer.append(text[index])
            index += 1

        flush()
        return tokens
+123
util/md_util.py
···
import re

import cross
import util.html_util as html_util
import util.util as util

URL = re.compile(r"(?:(?:[A-Za-z][A-Za-z0-9+.-]*://)|mailto:)[^\s]+", re.IGNORECASE)
MD_INLINE_LINK = re.compile(
    r"\[([^\]]+)\]\(\s*((?:(?:[A-Za-z][A-Za-z0-9+.\-]*://)|mailto:)[^\s\)]+)\s*\)",
    re.IGNORECASE,
)
MD_AUTOLINK = re.compile(
    r"<((?:(?:[A-Za-z][A-Za-z0-9+.\-]*://)|mailto:)[^\s>]+)>", re.IGNORECASE
)
HASHTAG = re.compile(r"(?<!\w)\#([\w]+)")
FEDIVERSE_HANDLE = re.compile(r"(?<![\w@])@([\w\.-]+)(?:@([\w\.-]+\.[\w\.-]+))?")


def tokenize_markdown(
    text: str, tags: list[str], handles: list[tuple[str, str]]
) -> list[cross.Token]:
    if not text:
        return []

    tokenizer = html_util.HTMLPostTokenizer()
    tokenizer.mentions = handles
    tokenizer.tags = tags
    tokenizer.feed(text)
    html_tokens = tokenizer.get_tokens()

    tokens: list[cross.Token] = []

    for tk in html_tokens:
        if isinstance(tk, cross.TextToken):
            tokens.extend(__tokenize_md(tk.text, tags, handles))
        elif isinstance(tk, cross.LinkToken):
            if not tk.label or util.canonical_label(tk.label, tk.href):
                tokens.append(tk)
                continue

            tokens.extend(__tokenize_md(f"[{tk.label}]({tk.href})", tags, handles))
        else:
            tokens.append(tk)

    return tokens


def __tokenize_md(
    text: str, tags: list[str], handles: list[tuple[str, str]]
) -> list[cross.Token]:
    index: int = 0
    total: int = len(text)
    buffer: list[str] = []

    tokens: list[cross.Token] = []

    def flush():
        nonlocal buffer
        if buffer:
            tokens.append(cross.TextToken("".join(buffer)))
            buffer = []

    while index < total:
        if text[index] == "[":
            md_inline = MD_INLINE_LINK.match(text, index)
            if md_inline:
                flush()
                label = md_inline.group(1)
                href = md_inline.group(2)
                tokens.append(cross.LinkToken(href, label))
                index = md_inline.end()
                continue

        if text[index] == "<":
            md_auto = MD_AUTOLINK.match(text, index)
            if md_auto:
                flush()
                href = md_auto.group(1)
                tokens.append(cross.LinkToken(href, href))
                index = md_auto.end()
                continue

        if text[index] == "#":
            tag = HASHTAG.match(text, index)
            if tag:
                tag_text = tag.group(1)
                if tag_text.lower() in tags:
                    flush()
                    tokens.append(cross.TagToken(tag_text))
                    index = tag.end()
                    continue

        if text[index] == "@":
            handle = FEDIVERSE_HANDLE.match(text, index)
            if handle:
                handle_text = handle.group(0)
                stripped_handle = handle_text.strip()

                match = next(
                    (pair for pair in handles if stripped_handle in pair), None
                )

                if match:
                    flush()
                    tokens.append(
                        cross.MentionToken(match[1], "")
                    )  # TODO: misskey doesn't provide a uri
                    index = handle.end()
                    continue

        url = URL.match(text, index)
        if url:
            flush()
            href = url.group(0)
            tokens.append(cross.LinkToken(href, href))
            index = url.end()
            continue

        buffer.append(text[index])
        index += 1

    flush()
    return tokens
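
For orientation, a small sketch of calling the new entry point; the text, tag list, and (empty) handle list are made up for illustration:

```python
# Hypothetical usage; inputs are illustrative only.
import util.md_util as md_util

tokens = md_util.tokenize_markdown(
    "read [the docs](https://example.com/docs) #release",
    tags=["release"],
    handles=[],
)
# -> TextToken("read "), LinkToken("https://example.com/docs", "the docs"),
#    TextToken(" "), TagToken("release")
```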
+160
util/media.py
···
import json
import os
import re
import subprocess
import urllib.parse

import magic
import requests

from util.util import LOGGER

FILENAME = re.compile(r'filename="?([^\";]*)"?')
MAGIC = magic.Magic(mime=True)


class MediaInfo:
    def __init__(self, url: str, name: str, mime: str, alt: str, io: bytes) -> None:
        self.url = url
        self.name = name
        self.mime = mime
        self.alt = alt
        self.io = io


def download_media(url: str, alt: str) -> MediaInfo | None:
    name = get_filename_from_url(url)
    io = download_blob(url, max_bytes=100_000_000)
    if not io:
        LOGGER.error("Failed to download media attachment! %s", url)
        return None
    mime = MAGIC.from_buffer(io)
    if not mime:
        mime = "application/octet-stream"
    return MediaInfo(url, name, mime, alt, io)


def get_filename_from_url(url):
    try:
        response = requests.head(url, allow_redirects=True)
        disposition = response.headers.get("Content-Disposition")
        if disposition:
            filename = FILENAME.findall(disposition)
            if filename:
                return filename[0]
    except requests.RequestException:
        pass

    parsed_url = urllib.parse.urlparse(url)
    base_name = os.path.basename(parsed_url.path)

    # hardcoded fix to return the cid for pds
    if base_name == "com.atproto.sync.getBlob":
        qs = urllib.parse.parse_qs(parsed_url.query)
        if qs and qs.get("cid"):
            return qs["cid"][0]

    return base_name


def probe_bytes(bytes: bytes) -> dict:
    cmd = [
        "ffprobe",
        "-v", "error",
        "-show_format",
        "-show_streams",
        "-print_format", "json",
        "pipe:0",
    ]
    proc = subprocess.run(
        cmd, input=bytes, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )

    if proc.returncode != 0:
        raise RuntimeError(f"ffprobe failed: {proc.stderr.decode()}")

    return json.loads(proc.stdout)


def convert_to_mp4(video_bytes: bytes) -> bytes:
    cmd = [
        "ffmpeg",
        "-i", "pipe:0",
        "-c:v", "libx264",
        "-crf", "30",
        "-preset", "slow",
        "-c:a", "aac",
        "-b:a", "128k",
        "-movflags", "frag_keyframe+empty_moov+default_base_moof",
        "-f", "mp4",
        "pipe:1",
    ]

    proc = subprocess.Popen(
        cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    out_bytes, err = proc.communicate(input=video_bytes)

    if proc.returncode != 0:
        raise RuntimeError(f"ffmpeg compress failed: {err.decode()}")

    return out_bytes


def compress_image(image_bytes: bytes, quality: int = 90):
    cmd = [
        "ffmpeg",
        "-f", "image2pipe",
        "-i", "pipe:0",
        "-c:v", "webp",
        "-q:v", str(quality),
        "-f", "image2pipe",
        "pipe:1",
    ]

    proc = subprocess.Popen(
        cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE, stderr=subprocess.PIPE
    )
    out_bytes, err = proc.communicate(input=image_bytes)

    if proc.returncode != 0:
        raise RuntimeError(f"ffmpeg compress failed: {err.decode()}")

    return out_bytes


def download_blob(url: str, max_bytes: int = 5_000_000) -> bytes | None:
    response = requests.get(url, stream=True, timeout=20)
    if response.status_code != 200:
        LOGGER.info("Failed to download %s! %s", url, response.text)
        return None

    downloaded_bytes = b""
    current_size = 0

    for chunk in response.iter_content(chunk_size=8192):
        if not chunk:
            continue

        current_size += len(chunk)
        if current_size > max_bytes:
            response.close()
            return None

        downloaded_bytes += chunk

    return downloaded_bytes


def get_media_meta(bytes: bytes):
    probe = probe_bytes(bytes)
    streams = [s for s in probe["streams"] if s["codec_type"] == "video"]
    if not streams:
        raise ValueError("No video stream found")

    media = streams[0]
    return {
        "width": int(media["width"]),
        "height": int(media["height"]),
        "duration": float(media.get("duration", probe["format"].get("duration", -1))),
    }
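
Since these helpers are plain functions over `bytes`, a plausible composition is sketched below; the URL and the mime-prefix dispatch are assumptions for illustration, not a call site from this diff:

```python
# Hypothetical pipeline; the clip URL and the branching are illustrative.
from util import media

info = media.download_media("https://example.com/clip.webm", "a demo clip")
if info and info.mime.startswith("video/"):
    meta = media.get_media_meta(info.io)   # width/height/duration via ffprobe
    mp4 = media.convert_to_mp4(info.io)    # re-encode through ffmpeg pipes
elif info and info.mime.startswith("image/"):
    webp = media.compress_image(info.io, quality=90)
```

Note that `download_media` caps attachments at 100 MB, while `download_blob` defaults to 5 MB, so direct `download_blob` callers get the stricter limit.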
-120
util/splitter.py
···
import re
from dataclasses import replace

import grapheme

from cross.tokens import LinkToken, TagToken, TextToken, Token


def canonical_label(label: str | None, href: str):
    if not label or label == href:
        return True

    split = href.split("://", 1)
    if len(split) > 1:
        if split[1] == label:
            return True

    return False


ALTERNATE = re.compile(r"\S+|\s+")


def split_tokens(
    tokens: list[Token],
    max_chars: int,
    max_link_len: int = 35,
) -> list[list[Token]]:
    def new_block() -> None:
        nonlocal blocks, block, length
        if block:
            blocks.append(block)
        block, length = [], 0

    def append_text(text: str) -> None:
        nonlocal block
        if block and isinstance(block[-1], TextToken):
            block[-1] = replace(block[-1], text=block[-1].text + text)
        else:
            block.append(TextToken(text=text))

    blocks: list[list[Token]] = []
    block: list[Token] = []
    length: int = 0

    for tk in tokens:
        if isinstance(tk, TagToken):
            tag_len = 1 + grapheme.length(tk.tag)
            if length + tag_len > max_chars:
                new_block()
            block.append(tk)
            length += tag_len
            continue
        if isinstance(tk, LinkToken):
            label_text = tk.label or ""
            link_len = grapheme.length(label_text)

            if canonical_label(tk.label, tk.href):
                link_len = min(link_len, max_link_len)

            if length + link_len <= max_chars:
                block.append(tk)
                length += link_len
                continue

            if length:
                new_block()

            remaining = label_text
            while remaining:
                room = (
                    max_chars
                    - length
                    - (0 if grapheme.length(remaining) <= max_chars else 1)
                )
                chunk = grapheme.slice(remaining, 0, room)
                if grapheme.length(remaining) > room:
                    chunk += "-"

                block.append(replace(tk, label=chunk))
                length += grapheme.length(chunk)

                remaining = grapheme.slice(remaining, room, grapheme.length(remaining))
                if remaining:
                    new_block()
            continue
        if isinstance(tk, TextToken):
            for seg in ALTERNATE.findall(tk.text):
                seg_len = grapheme.length(seg)

                if length + seg_len <= max_chars - (0 if seg.isspace() else 1):
                    append_text(seg)
                    length += seg_len
                    continue

                if length:
                    new_block()

                if not seg.isspace():
                    while grapheme.length(seg) > max_chars - 1:
                        chunk = grapheme.slice(seg, 0, max_chars - 1) + "-"
                        append_text(chunk)
                        new_block()
                        seg = grapheme.slice(seg, max_chars - 1, grapheme.length(seg))
                else:
                    while grapheme.length(seg) > max_chars:
                        chunk = grapheme.slice(seg, 0, max_chars)
                        append_text(chunk)
                        new_block()
                        seg = grapheme.slice(seg, max_chars, grapheme.length(seg))

                if seg:
                    append_text(seg)
                    length = grapheme.length(seg)
            continue
        block.append(tk)
    if block:
        blocks.append(block)

    return blocks
+35
-26
util/util.py
···
+import json
 import logging
+import os
 import sys
-import os
-from typing import Any, Callable
+
+logging.basicConfig(stream=sys.stdout, level=logging.INFO)
+LOGGER = logging.getLogger("XPost")
+
+
+def as_json(obj, indent=None, sort_keys=False) -> str:
+    return json.dumps(
+        obj.__dict__ if not isinstance(obj, dict) else obj,
+        default=lambda o: o.__json__() if hasattr(o, "__json__") else o.__dict__,
+        indent=indent,
+        sort_keys=sort_keys,
+    )
+
+
+def canonical_label(label: str | None, href: str):
+    if not label or label == href:
+        return True
+
+    split = href.split("://", 1)
+    if len(split) > 1:
+        if split[1] == label:
+            return True
+
+    return False

-import env

-shutdown_hook: list[Callable[[], None]] = []
+def safe_get(obj: dict, key: str, default):
+    val = obj.get(key, default)
+    return val if val else default

-logging.basicConfig(stream=sys.stderr, level=logging.DEBUG if env.DEV else logging.INFO)
-LOGGER = logging.getLogger("XPost")

-def normalize_service_url(url: str) -> str:
-    if not url.startswith("https://") and not url.startswith("http://"):
-        raise ValueError(f"Invalid service url {url}! Only http/https are supported.")
+def as_envvar(text: str | None) -> str | None:
+    if not text:
+        return None

-    return url[:-1] if url.endswith('/') else url
+    if text.startswith("env:"):
+        return os.environ.get(text[4:], "")

-def read_env(data: dict[str, Any]) -> None:
-    keys = list(data.keys())
-    for key in keys:
-        val = data[key]
-        match val:
-            case str():
-                if val.startswith('env:'):
-                    envval = os.environ.get(val[4:])
-                    if envval is None:
-                        del data[key]
-                    else:
-                        data[key] = envval
-            case dict():
-                read_env(val)
-            case _:
-                pass
+    return text
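
The recursive `read_env`, which rewrote whole config dicts in place, is replaced by `as_envvar`, which resolves one `env:`-prefixed string at a time. A small illustrative sketch (the variable name is made up):

```python
# Illustrative only; XPOST_TOKEN is a hypothetical variable name.
import os
from util.util import as_envvar, safe_get

os.environ["XPOST_TOKEN"] = "hunter2"
as_envvar("env:XPOST_TOKEN")  # -> "hunter2"
as_envvar("plain-value")      # -> "plain-value", passed through unchanged
as_envvar("env:MISSING")      # -> "" (missing variables become empty strings)

safe_get({"a": None}, "a", "fallback")  # -> "fallback"; falsy values take the default
```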
+337
-136
uv.lock
···
 version = 1
-revision = 3
+revision = 2
 requires-python = ">=3.12"

 [[package]]
+name = "annotated-types"
+version = "0.7.0"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/ee/67/531ea369ba64dcff5ec9c3402f9f51bf748cec26dde048a2f973a4eea7f5/annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89", size = 16081, upload-time = "2024-05-20T21:33:25.928Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/78/b6/6307fbef88d9b5ee7421e68d78a9f162e0da4900bc5f5793f6d3d0e34fb8/annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53", size = 13643, upload-time = "2024-05-20T21:33:24.1Z" },
+]
+
+[[package]]
+name = "anyio"
+version = "4.9.0"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "idna" },
+    { name = "sniffio" },
+    { name = "typing-extensions", marker = "python_full_version < '3.13'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" },
+]
+
+[[package]]
+name = "atproto"
+version = "0.0.61"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "click" },
+    { name = "cryptography" },
+    { name = "dnspython" },
+    { name = "httpx" },
+    { name = "libipld" },
+    { name = "pydantic" },
+    { name = "typing-extensions" },
+    { name = "websockets" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/b1/59/6f5074b3a45e0e3c1853544240e9039e86219feb30ff1bb5e8582c791547/atproto-0.0.61.tar.gz", hash = "sha256:98e022daf538d14f134ce7c91d42c4c973f3493ac56e43a84daa4c881f102beb", size = 189208, upload-time = "2025-04-19T00:20:11.918Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/bd/b6/da9963bf54d4c0a8a590b6297d8858c395243dbb04cb581fdadb5fe7eac7/atproto-0.0.61-py3-none-any.whl", hash = "sha256:658da5832aaeea4a12a9a74235f9c90c11453e77d596fdccb1f8b39d56245b88", size = 380426, upload-time = "2025-04-19T00:20:10.026Z" },
+]
+
+[[package]]
 name = "certifi"
-version = "2025.10.5"
+version = "2025.4.26"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/4c/5b/b6ce21586237c77ce67d01dc5507039d444b630dd76611bbca2d8e5dcd91/certifi-2025.10.5.tar.gz", hash = "sha256:47c09d31ccf2acf0be3f701ea53595ee7e0b8fa08801c6624be771df09ae7b43", size = 164519, upload-time = "2025-10-05T04:12:15.808Z" }
+sdist = { url = "https://files.pythonhosted.org/packages/e8/9e/c05b3920a3b7d20d3d3310465f50348e5b3694f4f88c6daf736eef3024c4/certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6", size = 160705, upload-time = "2025-04-26T02:12:29.51Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/4a/7e/3db2bd1b1f9e95f7cddca6d6e75e2f2bd9f51b1246e546d88addca0106bd/certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3", size = 159618, upload-time = "2025-04-26T02:12:27.662Z" },
+]
+
+[[package]]
+name = "cffi"
+version = "1.17.1"
+source = { registry = "https://pypi.org/simple" }
+dependencies = [
+    { name = "pycparser" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/fc/97/c783634659c2920c3fc70419e3af40972dbaf758daa229a7d6ea6135c90d/cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824", size = 516621, upload-time = "2024-09-04T20:45:21.852Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/e4/37/af0d2ef3967ac0d6113837b44a4f0bfe1328c2b9763bd5b1744520e5cfed/certifi-2025.10.5-py3-none-any.whl", hash = "sha256:0f212c2744a9bb6de0c56639a6f68afe01ecd92d91f14ae897c4fe7bbeeef0de", size = 163286, upload-time = "2025-10-05T04:12:14.03Z" },
+    { url = "https://files.pythonhosted.org/packages/5a/84/e94227139ee5fb4d600a7a4927f322e1d4aea6fdc50bd3fca8493caba23f/cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4", size = 183178, upload-time = "2024-09-04T20:44:12.232Z" },
+    { url = "https://files.pythonhosted.org/packages/da/ee/fb72c2b48656111c4ef27f0f91da355e130a923473bf5ee75c5643d00cca/cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c", size = 178840, upload-time = "2024-09-04T20:44:13.739Z" },
+    { url = "https://files.pythonhosted.org/packages/cc/b6/db007700f67d151abadf508cbfd6a1884f57eab90b1bb985c4c8c02b0f28/cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36", size = 454803, upload-time = "2024-09-04T20:44:15.231Z" },
+    { url = "https://files.pythonhosted.org/packages/1a/df/f8d151540d8c200eb1c6fba8cd0dfd40904f1b0682ea705c36e6c2e97ab3/cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5", size = 478850, upload-time = "2024-09-04T20:44:17.188Z" },
+    { url = "https://files.pythonhosted.org/packages/28/c0/b31116332a547fd2677ae5b78a2ef662dfc8023d67f41b2a83f7c2aa78b1/cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff", size = 485729, upload-time = "2024-09-04T20:44:18.688Z" },
+    { url = "https://files.pythonhosted.org/packages/91/2b/9a1ddfa5c7f13cab007a2c9cc295b70fbbda7cb10a286aa6810338e60ea1/cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99", size = 471256, upload-time = "2024-09-04T20:44:20.248Z" },
+    { url = "https://files.pythonhosted.org/packages/b2/d5/da47df7004cb17e4955df6a43d14b3b4ae77737dff8bf7f8f333196717bf/cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93", size = 479424, upload-time = "2024-09-04T20:44:21.673Z" },
+    { url = "https://files.pythonhosted.org/packages/0b/ac/2a28bcf513e93a219c8a4e8e125534f4f6db03e3179ba1c45e949b76212c/cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3", size = 484568, upload-time = "2024-09-04T20:44:23.245Z" },
+    { url = "https://files.pythonhosted.org/packages/d4/38/ca8a4f639065f14ae0f1d9751e70447a261f1a30fa7547a828ae08142465/cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8", size = 488736, upload-time = "2024-09-04T20:44:24.757Z" },
+    { url = "https://files.pythonhosted.org/packages/86/c5/28b2d6f799ec0bdecf44dced2ec5ed43e0eb63097b0f58c293583b406582/cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65", size = 172448, upload-time = "2024-09-04T20:44:26.208Z" },
+    { url = "https://files.pythonhosted.org/packages/50/b9/db34c4755a7bd1cb2d1603ac3863f22bcecbd1ba29e5ee841a4bc510b294/cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903", size = 181976, upload-time = "2024-09-04T20:44:27.578Z" },
+    { url = "https://files.pythonhosted.org/packages/8d/f8/dd6c246b148639254dad4d6803eb6a54e8c85c6e11ec9df2cffa87571dbe/cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e", size = 182989, upload-time = "2024-09-04T20:44:28.956Z" },
+    { url = "https://files.pythonhosted.org/packages/8b/f1/672d303ddf17c24fc83afd712316fda78dc6fce1cd53011b839483e1ecc8/cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2", size = 178802, upload-time = "2024-09-04T20:44:30.289Z" },
+    { url = "https://files.pythonhosted.org/packages/0e/2d/eab2e858a91fdff70533cab61dcff4a1f55ec60425832ddfdc9cd36bc8af/cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3", size = 454792, upload-time = "2024-09-04T20:44:32.01Z" },
+    { url = "https://files.pythonhosted.org/packages/75/b2/fbaec7c4455c604e29388d55599b99ebcc250a60050610fadde58932b7ee/cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683", size = 478893, upload-time = "2024-09-04T20:44:33.606Z" },
+    { url = "https://files.pythonhosted.org/packages/4f/b7/6e4a2162178bf1935c336d4da8a9352cccab4d3a5d7914065490f08c0690/cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5", size = 485810, upload-time = "2024-09-04T20:44:35.191Z" },
+    { url = "https://files.pythonhosted.org/packages/c7/8a/1d0e4a9c26e54746dc08c2c6c037889124d4f59dffd853a659fa545f1b40/cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4", size = 471200, upload-time = "2024-09-04T20:44:36.743Z" },
+    { url = "https://files.pythonhosted.org/packages/26/9f/1aab65a6c0db35f43c4d1b4f580e8df53914310afc10ae0397d29d697af4/cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd", size = 479447, upload-time = "2024-09-04T20:44:38.492Z" },
+    { url = "https://files.pythonhosted.org/packages/5f/e4/fb8b3dd8dc0e98edf1135ff067ae070bb32ef9d509d6cb0f538cd6f7483f/cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed", size = 484358, upload-time = "2024-09-04T20:44:40.046Z" },
+    { url = "https://files.pythonhosted.org/packages/f1/47/d7145bf2dc04684935d57d67dff9d6d795b2ba2796806bb109864be3a151/cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9", size = 488469, upload-time = "2024-09-04T20:44:41.616Z" },
+    { url = "https://files.pythonhosted.org/packages/bf/ee/f94057fa6426481d663b88637a9a10e859e492c73d0384514a17d78ee205/cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d", size = 172475, upload-time = "2024-09-04T20:44:43.733Z" },
+    { url = "https://files.pythonhosted.org/packages/7c/fc/6a8cb64e5f0324877d503c854da15d76c1e50eb722e320b15345c4d0c6de/cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = "sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a", size = 182009, upload-time = "2024-09-04T20:44:45.309Z" },
 ]

 [[package]]
 name = "charset-normalizer"
-version = "3.4.4"
+version = "3.4.2"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" }
+wheels = [
+    { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" },
+    { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" },
+    { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" },
+    { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" },
+    { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" },
+    { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" },
+    { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" },
+    { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" },
+    { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" },
+    { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" },
+    { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" },
+    { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" },
+    { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" },
+    { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" },
+    { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" },
+    { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" },
+    { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" },
+    { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" },
+    { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" },
+    { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" },
+    { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" },
+    { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" },
+    { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" },
+    { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" },
+    { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" },
+    { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" },
+    { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" },
+]
+
+[[package]]
+name = "click"
+version = "8.2.1"
 source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/13/69/33ddede1939fdd074bce5434295f38fae7136463422fe4fd3e0e89b98062/charset_normalizer-3.4.4.tar.gz", hash = "sha256:94537985111c35f28720e43603b8e7b43a6ecfb2ce1d3058bbe955b73404e21a", size = 129418, upload-time = "2025-10-14T04:42:32.879Z" }
+dependencies = [
+    { name = "colorama", marker = "sys_platform == 'win32'" },
+]
+sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" }
 wheels = [
-    { url = "https://files.pythonhosted.org/packages/f3/85/1637cd4af66fa687396e757dec650f28025f2a2f5a5531a3208dc0ec43f2/charset_normalizer-3.4.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0a98e6759f854bd25a58a73fa88833fba3b7c491169f86ce1180c948ab3fd394", size = 208425, upload-time = "2025-10-14T04:40:53.353Z" },
-    { url = "https://files.pythonhosted.org/packages/9d/6a/04130023fef2a0d9c62d0bae2649b69f7b7d8d24ea5536feef50551029df/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b5b290ccc2a263e8d185130284f8501e3e36c5e02750fc6b6bdeb2e9e96f1e25", size = 148162, upload-time = "2025-10-14T04:40:54.558Z" },
-    { url = "https://files.pythonhosted.org/packages/78/29/62328d79aa60da22c9e0b9a66539feae06ca0f5a4171ac4f7dc285b83688/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74bb723680f9f7a6234dcf67aea57e708ec1fbdf5699fb91dfd6f511b0a320ef", size = 144558, upload-time = "2025-10-14T04:40:55.677Z" },
-    { url = "https://files.pythonhosted.org/packages/86/bb/b32194a4bf15b88403537c2e120b817c61cd4ecffa9b6876e941c3ee38fe/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f1e34719c6ed0b92f418c7c780480b26b5d9c50349e9a9af7d76bf757530350d", size = 161497, upload-time = "2025-10-14T04:40:57.217Z" },
-    { url = "https://files.pythonhosted.org/packages/19/89/a54c82b253d5b9b111dc74aca196ba5ccfcca8242d0fb64146d4d3183ff1/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:2437418e20515acec67d86e12bf70056a33abdacb5cb1655042f6538d6b085a8", size = 159240, upload-time = "2025-10-14T04:40:58.358Z" },
-    { url = "https://files.pythonhosted.org/packages/c0/10/d20b513afe03acc89ec33948320a5544d31f21b05368436d580dec4e234d/charset_normalizer-3.4.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:11d694519d7f29d6cd09f6ac70028dba10f92f6cdd059096db198c283794ac86", size = 153471, upload-time = "2025-10-14T04:40:59.468Z" },
-    { url = "https://files.pythonhosted.org/packages/61/fa/fbf177b55bdd727010f9c0a3c49eefa1d10f960e5f09d1d887bf93c2e698/charset_normalizer-3.4.4-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:ac1c4a689edcc530fc9d9aa11f5774b9e2f33f9a0c6a57864e90908f5208d30a", size = 150864, upload-time = "2025-10-14T04:41:00.623Z" },
-    { url = "https://files.pythonhosted.org/packages/05/12/9fbc6a4d39c0198adeebbde20b619790e9236557ca59fc40e0e3cebe6f40/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:21d142cc6c0ec30d2efee5068ca36c128a30b0f2c53c1c07bd78cb6bc1d3be5f", size = 150647, upload-time = "2025-10-14T04:41:01.754Z" },
-    { url = "https://files.pythonhosted.org/packages/ad/1f/6a9a593d52e3e8c5d2b167daf8c6b968808efb57ef4c210acb907c365bc4/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:5dbe56a36425d26d6cfb40ce79c314a2e4dd6211d51d6d2191c00bed34f354cc", size = 145110, upload-time = "2025-10-14T04:41:03.231Z" },
-    { url = "https://files.pythonhosted.org/packages/30/42/9a52c609e72471b0fc54386dc63c3781a387bb4fe61c20231a4ebcd58bdd/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:5bfbb1b9acf3334612667b61bd3002196fe2a1eb4dd74d247e0f2a4d50ec9bbf", size = 162839, upload-time = "2025-10-14T04:41:04.715Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/5b/c0682bbf9f11597073052628ddd38344a3d673fda35a36773f7d19344b23/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:d055ec1e26e441f6187acf818b73564e6e6282709e9bcb5b63f5b23068356a15", size = 150667, upload-time = "2025-10-14T04:41:05.827Z" },
-    { url = "https://files.pythonhosted.org/packages/e4/24/a41afeab6f990cf2daf6cb8c67419b63b48cf518e4f56022230840c9bfb2/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:af2d8c67d8e573d6de5bc30cdb27e9b95e49115cd9baad5ddbd1a6207aaa82a9", size = 160535, upload-time = "2025-10-14T04:41:06.938Z" },
-    { url = "https://files.pythonhosted.org/packages/2a/e5/6a4ce77ed243c4a50a1fecca6aaaab419628c818a49434be428fe24c9957/charset_normalizer-3.4.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:780236ac706e66881f3b7f2f32dfe90507a09e67d1d454c762cf642e6e1586e0", size = 154816, upload-time = "2025-10-14T04:41:08.101Z" },
-    { url = "https://files.pythonhosted.org/packages/a8/ef/89297262b8092b312d29cdb2517cb1237e51db8ecef2e9af5edbe7b683b1/charset_normalizer-3.4.4-cp312-cp312-win32.whl", hash = "sha256:5833d2c39d8896e4e19b689ffc198f08ea58116bee26dea51e362ecc7cd3ed26", size = 99694, upload-time = "2025-10-14T04:41:09.23Z" },
-    { url = "https://files.pythonhosted.org/packages/3d/2d/1e5ed9dd3b3803994c155cd9aacb60c82c331bad84daf75bcb9c91b3295e/charset_normalizer-3.4.4-cp312-cp312-win_amd64.whl", hash = "sha256:a79cfe37875f822425b89a82333404539ae63dbdddf97f84dcbc3d339aae9525", size = 107131, upload-time = "2025-10-14T04:41:10.467Z" },
-    { url = "https://files.pythonhosted.org/packages/d0/d9/0ed4c7098a861482a7b6a95603edce4c0d9db2311af23da1fb2b75ec26fc/charset_normalizer-3.4.4-cp312-cp312-win_arm64.whl", hash = "sha256:376bec83a63b8021bb5c8ea75e21c4ccb86e7e45ca4eb81146091b56599b80c3", size = 100390, upload-time = "2025-10-14T04:41:11.915Z" },
-    { url = "https://files.pythonhosted.org/packages/97/45/4b3a1239bbacd321068ea6e7ac28875b03ab8bc0aa0966452db17cd36714/charset_normalizer-3.4.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:e1f185f86a6f3403aa2420e815904c67b2f9ebc443f045edd0de921108345794", size = 208091, upload-time = "2025-10-14T04:41:13.346Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/62/73a6d7450829655a35bb88a88fca7d736f9882a27eacdca2c6d505b57e2e/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b39f987ae8ccdf0d2642338faf2abb1862340facc796048b604ef14919e55ed", size = 147936, upload-time = "2025-10-14T04:41:14.461Z" },
-    { url = "https://files.pythonhosted.org/packages/89/c5/adb8c8b3d6625bef6d88b251bbb0d95f8205831b987631ab0c8bb5d937c2/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3162d5d8ce1bb98dd51af660f2121c55d0fa541b46dff7bb9b9f86ea1d87de72", size = 144180, upload-time = "2025-10-14T04:41:15.588Z" },
-    { url = "https://files.pythonhosted.org/packages/91/ed/9706e4070682d1cc219050b6048bfd293ccf67b3d4f5a4f39207453d4b99/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:81d5eb2a312700f4ecaa977a8235b634ce853200e828fbadf3a9c50bab278328", size = 161346, upload-time = "2025-10-14T04:41:16.738Z" },
-    { url = "https://files.pythonhosted.org/packages/d5/0d/031f0d95e4972901a2f6f09ef055751805ff541511dc1252ba3ca1f80cf5/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5bd2293095d766545ec1a8f612559f6b40abc0eb18bb2f5d1171872d34036ede", size = 158874, upload-time = "2025-10-14T04:41:17.923Z" },
-    { url = "https://files.pythonhosted.org/packages/f5/83/6ab5883f57c9c801ce5e5677242328aa45592be8a00644310a008d04f922/charset_normalizer-3.4.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a8a8b89589086a25749f471e6a900d3f662d1d3b6e2e59dcecf787b1cc3a1894", size = 153076, upload-time = "2025-10-14T04:41:19.106Z" },
-    { url = "https://files.pythonhosted.org/packages/75/1e/5ff781ddf5260e387d6419959ee89ef13878229732732ee73cdae01800f2/charset_normalizer-3.4.4-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc7637e2f80d8530ee4a78e878bce464f70087ce73cf7c1caf142416923b98f1", size = 150601, upload-time = "2025-10-14T04:41:20.245Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/57/71be810965493d3510a6ca79b90c19e48696fb1ff964da319334b12677f0/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f8bf04158c6b607d747e93949aa60618b61312fe647a6369f88ce2ff16043490", size = 150376, upload-time = "2025-10-14T04:41:21.398Z" },
-    { url = "https://files.pythonhosted.org/packages/e5/d5/c3d057a78c181d007014feb7e9f2e65905a6c4ef182c0ddf0de2924edd65/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:554af85e960429cf30784dd47447d5125aaa3b99a6f0683589dbd27e2f45da44", size = 144825, upload-time = "2025-10-14T04:41:22.583Z" },
-    { url = "https://files.pythonhosted.org/packages/e6/8c/d0406294828d4976f275ffbe66f00266c4b3136b7506941d87c00cab5272/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:74018750915ee7ad843a774364e13a3db91682f26142baddf775342c3f5b1133", size = 162583, upload-time = "2025-10-14T04:41:23.754Z" },
-    { url = "https://files.pythonhosted.org/packages/d7/24/e2aa1f18c8f15c4c0e932d9287b8609dd30ad56dbe41d926bd846e22fb8d/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:c0463276121fdee9c49b98908b3a89c39be45d86d1dbaa22957e38f6321d4ce3", size = 150366, upload-time = "2025-10-14T04:41:25.27Z" },
-    { url = "https://files.pythonhosted.org/packages/e4/5b/1e6160c7739aad1e2df054300cc618b06bf784a7a164b0f238360721ab86/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:362d61fd13843997c1c446760ef36f240cf81d3ebf74ac62652aebaf7838561e", size = 160300, upload-time = "2025-10-14T04:41:26.725Z" },
-    { url = "https://files.pythonhosted.org/packages/7a/10/f882167cd207fbdd743e55534d5d9620e095089d176d55cb22d5322f2afd/charset_normalizer-3.4.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9a26f18905b8dd5d685d6d07b0cdf98a79f3c7a918906af7cc143ea2e164c8bc", size = 154465, upload-time = "2025-10-14T04:41:28.322Z" },
-    { url = "https://files.pythonhosted.org/packages/89/66/c7a9e1b7429be72123441bfdbaf2bc13faab3f90b933f664db506dea5915/charset_normalizer-3.4.4-cp313-cp313-win32.whl", hash = "sha256:9b35f4c90079ff2e2edc5b26c0c77925e5d2d255c42c74fdb70fb49b172726ac", size = 99404, upload-time = "2025-10-14T04:41:29.95Z" },
-    { url = "https://files.pythonhosted.org/packages/c4/26/b9924fa27db384bdcd97ab83b4f0a8058d96ad9626ead570674d5e737d90/charset_normalizer-3.4.4-cp313-cp313-win_amd64.whl", hash = "sha256:b435cba5f4f750aa6c0a0d92c541fb79f69a387c91e61f1795227e4ed9cece14", size = 107092, upload-time = "2025-10-14T04:41:31.188Z" },
-    { url = "https://files.pythonhosted.org/packages/af/8f/3ed4bfa0c0c72a7ca17f0380cd9e4dd842b09f664e780c13cff1dcf2ef1b/charset_normalizer-3.4.4-cp313-cp313-win_arm64.whl", hash = "sha256:542d2cee80be6f80247095cc36c418f7bddd14f4a6de45af91dfad36d817bba2", size = 100408, upload-time = "2025-10-14T04:41:32.624Z" },
-    { url = "https://files.pythonhosted.org/packages/2a/35/7051599bd493e62411d6ede36fd5af83a38f37c4767b92884df7301db25d/charset_normalizer-3.4.4-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:da3326d9e65ef63a817ecbcc0df6e94463713b754fe293eaa03da99befb9a5bd", size = 207746, upload-time = "2025-10-14T04:41:33.773Z" },
-    { url = "https://files.pythonhosted.org/packages/10/9a/97c8d48ef10d6cd4fcead2415523221624bf58bcf68a802721a6bc807c8f/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8af65f14dc14a79b924524b1e7fffe304517b2bff5a58bf64f30b98bbc5079eb", size = 147889, upload-time = "2025-10-14T04:41:34.897Z" },
-    { url = "https://files.pythonhosted.org/packages/10/bf/979224a919a1b606c82bd2c5fa49b5c6d5727aa47b4312bb27b1734f53cd/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_armv7l.manylinux_2_17_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:74664978bb272435107de04e36db5a9735e78232b85b77d45cfb38f758efd33e", size = 143641, upload-time = "2025-10-14T04:41:36.116Z" },
-    { url = "https://files.pythonhosted.org/packages/ba/33/0ad65587441fc730dc7bd90e9716b30b4702dc7b617e6ba4997dc8651495/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:752944c7ffbfdd10c074dc58ec2d5a8a4cd9493b314d367c14d24c17684ddd14", size = 160779, upload-time = "2025-10-14T04:41:37.229Z" },
-    { url = "https://files.pythonhosted.org/packages/67/ed/331d6b249259ee71ddea93f6f2f0a56cfebd46938bde6fcc6f7b9a3d0e09/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d1f13550535ad8cff21b8d757a3257963e951d96e20ec82ab44bc64aeb62a191", size = 159035, upload-time = "2025-10-14T04:41:38.368Z" },
-    { url = "https://files.pythonhosted.org/packages/67/ff/f6b948ca32e4f2a4576aa129d8bed61f2e0543bf9f5f2b7fc3758ed005c9/charset_normalizer-3.4.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ecaae4149d99b1c9e7b88bb03e3221956f68fd6d50be2ef061b2381b61d20838", size = 152542, upload-time = "2025-10-14T04:41:39.862Z" },
-    { url = "https://files.pythonhosted.org/packages/16/85/276033dcbcc369eb176594de22728541a925b2632f9716428c851b149e83/charset_normalizer-3.4.4-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cb6254dc36b47a990e59e1068afacdcd02958bdcce30bb50cc1700a8b9d624a6", size = 149524, upload-time = "2025-10-14T04:41:41.319Z" },
-    { url = "https://files.pythonhosted.org/packages/9e/f2/6a2a1f722b6aba37050e626530a46a68f74e63683947a8acff92569f979a/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:c8ae8a0f02f57a6e61203a31428fa1d677cbe50c93622b4149d5c0f319c1d19e", size = 150395, upload-time = "2025-10-14T04:41:42.539Z" },
-    { url = "https://files.pythonhosted.org/packages/60/bb/2186cb2f2bbaea6338cad15ce23a67f9b0672929744381e28b0592676824/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:47cc91b2f4dd2833fddaedd2893006b0106129d4b94fdb6af1f4ce5a9965577c", size = 143680, upload-time = "2025-10-14T04:41:43.661Z" },
-    { url = "https://files.pythonhosted.org/packages/7d/a5/bf6f13b772fbb2a90360eb620d52ed8f796f3c5caee8398c3b2eb7b1c60d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:82004af6c302b5d3ab2cfc4cc5f29db16123b1a8417f2e25f9066f91d4411090", size = 162045, upload-time = "2025-10-14T04:41:44.821Z" },
-    { url = "https://files.pythonhosted.org/packages/df/c5/d1be898bf0dc3ef9030c3825e5d3b83f2c528d207d246cbabe245966808d/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:2b7d8f6c26245217bd2ad053761201e9f9680f8ce52f0fcd8d0755aeae5b2152", size = 149687, upload-time = "2025-10-14T04:41:46.442Z" },
-    { url = "https://files.pythonhosted.org/packages/a5/42/90c1f7b9341eef50c8a1cb3f098ac43b0508413f33affd762855f67a410e/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:799a7a5e4fb2d5898c60b640fd4981d6a25f1c11790935a44ce38c54e985f828", size = 160014, upload-time = "2025-10-14T04:41:47.631Z" },
-    { url = "https://files.pythonhosted.org/packages/76/be/4d3ee471e8145d12795ab655ece37baed0929462a86e72372fd25859047c/charset_normalizer-3.4.4-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:99ae2cffebb06e6c22bdc25801d7b30f503cc87dbd283479e7b606f70aff57ec", size = 154044, upload-time = "2025-10-14T04:41:48.81Z" },
-    { url = "https://files.pythonhosted.org/packages/b0/6f/8f7af07237c34a1defe7defc565a9bc1807762f672c0fde711a4b22bf9c0/charset_normalizer-3.4.4-cp314-cp314-win32.whl", hash = "sha256:f9d332f8c2a2fcbffe1378594431458ddbef721c1769d78e2cbc06280d8155f9", size = 99940, upload-time = "2025-10-14T04:41:49.946Z" },
-    { url = "https://files.pythonhosted.org/packages/4b/51/8ade005e5ca5b0d80fb4aff72a3775b325bdc3d27408c8113811a7cbe640/charset_normalizer-3.4.4-cp314-cp314-win_amd64.whl", hash = "sha256:8a6562c3700cce886c5be75ade4a5db4214fda19fede41d9792d100288d8f94c", size = 107104, upload-time = "2025-10-14T04:41:51.051Z" },
-    { url = "https://files.pythonhosted.org/packages/da/5f/6b8f83a55bb8278772c5ae54a577f3099025f9ade59d0136ac24a0df4bde/charset_normalizer-3.4.4-cp314-cp314-win_arm64.whl", hash = "sha256:de00632ca48df9daf77a2c65a484531649261ec9f25489917f09e455cb09ddb2", size = 100743, upload-time = "2025-10-14T04:41:52.122Z" },
-    { url = "https://files.pythonhosted.org/packages/0a/4c/925909008ed5a988ccbb72dcc897407e5d6d3bd72410d69e051fc0c14647/charset_normalizer-3.4.4-py3-none-any.whl", hash = "sha256:7a32c560861a02ff789ad905a2fe94e3f840803362c84fecf1851cb4cf3dc37f", size = 53402, upload-time = "2025-10-14T04:42:31.76Z" },
+
{ url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" },
69
134
]
70
135
71
136
[[package]]
···
78
143
]
79
144
80
145
[[package]]
146
+
name = "cryptography"
147
+
version = "45.0.3"
148
+
source = { registry = "https://pypi.org/simple" }
149
+
dependencies = [
150
+
{ name = "cffi", marker = "platform_python_implementation != 'PyPy'" },
151
+
]
152
+
sdist = { url = "https://files.pythonhosted.org/packages/13/1f/9fa001e74a1993a9cadd2333bb889e50c66327b8594ac538ab8a04f915b7/cryptography-45.0.3.tar.gz", hash = "sha256:ec21313dd335c51d7877baf2972569f40a4291b76a0ce51391523ae358d05899", size = 744738, upload-time = "2025-05-25T14:17:24.777Z" }
153
+
wheels = [
154
+
{ url = "https://files.pythonhosted.org/packages/82/b2/2345dc595998caa6f68adf84e8f8b50d18e9fc4638d32b22ea8daedd4b7a/cryptography-45.0.3-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:7573d9eebaeceeb55285205dbbb8753ac1e962af3d9640791d12b36864065e71", size = 7056239, upload-time = "2025-05-25T14:16:12.22Z" },
155
+
{ url = "https://files.pythonhosted.org/packages/71/3d/ac361649a0bfffc105e2298b720d8b862330a767dab27c06adc2ddbef96a/cryptography-45.0.3-cp311-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d377dde61c5d67eb4311eace661c3efda46c62113ff56bf05e2d679e02aebb5b", size = 4205541, upload-time = "2025-05-25T14:16:14.333Z" },
156
+
{ url = "https://files.pythonhosted.org/packages/70/3e/c02a043750494d5c445f769e9c9f67e550d65060e0bfce52d91c1362693d/cryptography-45.0.3-cp311-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fae1e637f527750811588e4582988932c222f8251f7b7ea93739acb624e1487f", size = 4433275, upload-time = "2025-05-25T14:16:16.421Z" },
157
+
{ url = "https://files.pythonhosted.org/packages/40/7a/9af0bfd48784e80eef3eb6fd6fde96fe706b4fc156751ce1b2b965dada70/cryptography-45.0.3-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ca932e11218bcc9ef812aa497cdf669484870ecbcf2d99b765d6c27a86000942", size = 4209173, upload-time = "2025-05-25T14:16:18.163Z" },
158
+
{ url = "https://files.pythonhosted.org/packages/31/5f/d6f8753c8708912df52e67969e80ef70b8e8897306cd9eb8b98201f8c184/cryptography-45.0.3-cp311-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af3f92b1dc25621f5fad065288a44ac790c5798e986a34d393ab27d2b27fcff9", size = 3898150, upload-time = "2025-05-25T14:16:20.34Z" },
159
+
{ url = "https://files.pythonhosted.org/packages/8b/50/f256ab79c671fb066e47336706dc398c3b1e125f952e07d54ce82cf4011a/cryptography-45.0.3-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:2f8f8f0b73b885ddd7f3d8c2b2234a7d3ba49002b0223f58cfde1bedd9563c56", size = 4466473, upload-time = "2025-05-25T14:16:22.605Z" },
160
+
{ url = "https://files.pythonhosted.org/packages/62/e7/312428336bb2df0848d0768ab5a062e11a32d18139447a76dfc19ada8eed/cryptography-45.0.3-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9cc80ce69032ffa528b5e16d217fa4d8d4bb7d6ba8659c1b4d74a1b0f4235fca", size = 4211890, upload-time = "2025-05-25T14:16:24.738Z" },
161
+
{ url = "https://files.pythonhosted.org/packages/e7/53/8a130e22c1e432b3c14896ec5eb7ac01fb53c6737e1d705df7e0efb647c6/cryptography-45.0.3-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c824c9281cb628015bfc3c59335163d4ca0540d49de4582d6c2637312907e4b1", size = 4466300, upload-time = "2025-05-25T14:16:26.768Z" },
162
+
{ url = "https://files.pythonhosted.org/packages/ba/75/6bb6579688ef805fd16a053005fce93944cdade465fc92ef32bbc5c40681/cryptography-45.0.3-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:5833bb4355cb377ebd880457663a972cd044e7f49585aee39245c0d592904578", size = 4332483, upload-time = "2025-05-25T14:16:28.316Z" },
163
+
{ url = "https://files.pythonhosted.org/packages/2f/11/2538f4e1ce05c6c4f81f43c1ef2bd6de7ae5e24ee284460ff6c77e42ca77/cryptography-45.0.3-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:9bb5bf55dcb69f7067d80354d0a348368da907345a2c448b0babc4215ccd3497", size = 4573714, upload-time = "2025-05-25T14:16:30.474Z" },
164
+
{ url = "https://files.pythonhosted.org/packages/f5/bb/e86e9cf07f73a98d84a4084e8fd420b0e82330a901d9cac8149f994c3417/cryptography-45.0.3-cp311-abi3-win32.whl", hash = "sha256:3ad69eeb92a9de9421e1f6685e85a10fbcfb75c833b42cc9bc2ba9fb00da4710", size = 2934752, upload-time = "2025-05-25T14:16:32.204Z" },
165
+
{ url = "https://files.pythonhosted.org/packages/c7/75/063bc9ddc3d1c73e959054f1fc091b79572e716ef74d6caaa56e945b4af9/cryptography-45.0.3-cp311-abi3-win_amd64.whl", hash = "sha256:97787952246a77d77934d41b62fb1b6f3581d83f71b44796a4158d93b8f5c490", size = 3412465, upload-time = "2025-05-25T14:16:33.888Z" },
166
+
{ url = "https://files.pythonhosted.org/packages/71/9b/04ead6015229a9396890d7654ee35ef630860fb42dc9ff9ec27f72157952/cryptography-45.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:c92519d242703b675ccefd0f0562eb45e74d438e001f8ab52d628e885751fb06", size = 7031892, upload-time = "2025-05-25T14:16:36.214Z" },
167
+
{ url = "https://files.pythonhosted.org/packages/46/c7/c7d05d0e133a09fc677b8a87953815c522697bdf025e5cac13ba419e7240/cryptography-45.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c5edcb90da1843df85292ef3a313513766a78fbbb83f584a5a58fb001a5a9d57", size = 4196181, upload-time = "2025-05-25T14:16:37.934Z" },
168
+
{ url = "https://files.pythonhosted.org/packages/08/7a/6ad3aa796b18a683657cef930a986fac0045417e2dc428fd336cfc45ba52/cryptography-45.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:38deed72285c7ed699864f964a3f4cf11ab3fb38e8d39cfcd96710cd2b5bb716", size = 4423370, upload-time = "2025-05-25T14:16:39.502Z" },
169
+
{ url = "https://files.pythonhosted.org/packages/4f/58/ec1461bfcb393525f597ac6a10a63938d18775b7803324072974b41a926b/cryptography-45.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5555365a50efe1f486eed6ac7062c33b97ccef409f5970a0b6f205a7cfab59c8", size = 4197839, upload-time = "2025-05-25T14:16:41.322Z" },
170
+
{ url = "https://files.pythonhosted.org/packages/d4/3d/5185b117c32ad4f40846f579369a80e710d6146c2baa8ce09d01612750db/cryptography-45.0.3-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9e4253ed8f5948a3589b3caee7ad9a5bf218ffd16869c516535325fece163dcc", size = 3886324, upload-time = "2025-05-25T14:16:43.041Z" },
171
+
{ url = "https://files.pythonhosted.org/packages/67/85/caba91a57d291a2ad46e74016d1f83ac294f08128b26e2a81e9b4f2d2555/cryptography-45.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:cfd84777b4b6684955ce86156cfb5e08d75e80dc2585e10d69e47f014f0a5342", size = 4450447, upload-time = "2025-05-25T14:16:44.759Z" },
172
+
{ url = "https://files.pythonhosted.org/packages/ae/d1/164e3c9d559133a38279215c712b8ba38e77735d3412f37711b9f8f6f7e0/cryptography-45.0.3-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:a2b56de3417fd5f48773ad8e91abaa700b678dc7fe1e0c757e1ae340779acf7b", size = 4200576, upload-time = "2025-05-25T14:16:46.438Z" },
173
+
{ url = "https://files.pythonhosted.org/packages/71/7a/e002d5ce624ed46dfc32abe1deff32190f3ac47ede911789ee936f5a4255/cryptography-45.0.3-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:57a6500d459e8035e813bd8b51b671977fb149a8c95ed814989da682314d0782", size = 4450308, upload-time = "2025-05-25T14:16:48.228Z" },
174
+
{ url = "https://files.pythonhosted.org/packages/87/ad/3fbff9c28cf09b0a71e98af57d74f3662dea4a174b12acc493de00ea3f28/cryptography-45.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:f22af3c78abfbc7cbcdf2c55d23c3e022e1a462ee2481011d518c7fb9c9f3d65", size = 4325125, upload-time = "2025-05-25T14:16:49.844Z" },
175
+
{ url = "https://files.pythonhosted.org/packages/f5/b4/51417d0cc01802304c1984d76e9592f15e4801abd44ef7ba657060520bf0/cryptography-45.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:232954730c362638544758a8160c4ee1b832dc011d2c41a306ad8f7cccc5bb0b", size = 4560038, upload-time = "2025-05-25T14:16:51.398Z" },
176
+
{ url = "https://files.pythonhosted.org/packages/80/38/d572f6482d45789a7202fb87d052deb7a7b136bf17473ebff33536727a2c/cryptography-45.0.3-cp37-abi3-win32.whl", hash = "sha256:cb6ab89421bc90e0422aca911c69044c2912fc3debb19bb3c1bfe28ee3dff6ab", size = 2924070, upload-time = "2025-05-25T14:16:53.472Z" },
177
+
{ url = "https://files.pythonhosted.org/packages/91/5a/61f39c0ff4443651cc64e626fa97ad3099249152039952be8f344d6b0c86/cryptography-45.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:d54ae41e6bd70ea23707843021c778f151ca258081586f0cfa31d936ae43d1b2", size = 3395005, upload-time = "2025-05-25T14:16:55.134Z" },
178
+
]
179
+
180
+
[[package]]
81
181
name = "dnspython"
82
-
version = "2.8.0"
182
+
version = "2.7.0"
183
+
source = { registry = "https://pypi.org/simple" }
184
+
sdist = { url = "https://files.pythonhosted.org/packages/b5/4a/263763cb2ba3816dd94b08ad3a33d5fdae34ecb856678773cc40a3605829/dnspython-2.7.0.tar.gz", hash = "sha256:ce9c432eda0dc91cf618a5cedf1a4e142651196bbcd2c80e89ed5a907e5cfaf1", size = 345197, upload-time = "2024-10-05T20:14:59.362Z" }
185
+
wheels = [
186
+
{ url = "https://files.pythonhosted.org/packages/68/1b/e0a87d256e40e8c888847551b20a017a6b98139178505dc7ffb96f04e954/dnspython-2.7.0-py3-none-any.whl", hash = "sha256:b4c34b7d10b51bcc3a5071e7b8dee77939f1e878477eeecc965e9835f63c6c86", size = 313632, upload-time = "2024-10-05T20:14:57.687Z" },
187
+
]
188
+
189
+
[[package]]
190
+
name = "h11"
191
+
version = "0.16.0"
83
192
source = { registry = "https://pypi.org/simple" }
84
-
sdist = { url = "https://files.pythonhosted.org/packages/8c/8b/57666417c0f90f08bcafa776861060426765fdb422eb10212086fb811d26/dnspython-2.8.0.tar.gz", hash = "sha256:181d3c6996452cb1189c4046c61599b84a5a86e099562ffde77d26984ff26d0f", size = 368251, upload-time = "2025-09-07T18:58:00.022Z" }
193
+
sdist = { url = "https://files.pythonhosted.org/packages/01/ee/02a2c011bdab74c6fb3c75474d40b3052059d95df7e73351460c8588d963/h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1", size = 101250, upload-time = "2025-04-24T03:35:25.427Z" }
85
194
wheels = [
86
-
{ url = "https://files.pythonhosted.org/packages/ba/5a/18ad964b0086c6e62e2e7500f7edc89e3faa45033c71c1893d34eed2b2de/dnspython-2.8.0-py3-none-any.whl", hash = "sha256:01d9bbc4a2d76bf0db7c1f729812ded6d912bd318d3b1cf81d30c0f845dbf3af", size = 331094, upload-time = "2025-09-07T18:57:58.071Z" },
195
+
{ url = "https://files.pythonhosted.org/packages/04/4b/29cac41a4d98d144bf5f6d33995617b185d14b22401f75ca86f384e87ff1/h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86", size = 37515, upload-time = "2025-04-24T03:35:24.344Z" },
87
196
]
88
197
89
198
[[package]]
90
-
name = "grapheme"
91
-
version = "0.6.0"
199
+
name = "httpcore"
200
+
version = "1.0.9"
92
201
source = { registry = "https://pypi.org/simple" }
93
-
sdist = { url = "https://files.pythonhosted.org/packages/ce/e7/bbaab0d2a33e07c8278910c1d0d8d4f3781293dfbc70b5c38197159046bf/grapheme-0.6.0.tar.gz", hash = "sha256:44c2b9f21bbe77cfb05835fec230bd435954275267fea1858013b102f8603cca", size = 207306, upload-time = "2020-03-07T17:13:55.492Z" }
202
+
dependencies = [
203
+
{ name = "certifi" },
204
+
{ name = "h11" },
205
+
]
206
+
sdist = { url = "https://files.pythonhosted.org/packages/06/94/82699a10bca87a5556c9c59b5963f2d039dbd239f25bc2a63907a05a14cb/httpcore-1.0.9.tar.gz", hash = "sha256:6e34463af53fd2ab5d807f399a9b45ea31c3dfa2276f15a2c3f00afff6e176e8", size = 85484, upload-time = "2025-04-24T22:06:22.219Z" }
207
+
wheels = [
208
+
{ url = "https://files.pythonhosted.org/packages/7e/f5/f66802a942d491edb555dd61e3a9961140fd64c90bce1eafd741609d334d/httpcore-1.0.9-py3-none-any.whl", hash = "sha256:2d400746a40668fc9dec9810239072b40b4484b640a8c38fd654a024c7a1bf55", size = 78784, upload-time = "2025-04-24T22:06:20.566Z" },
209
+
]
94
210
95
211
[[package]]
96
-
name = "idna"
97
-
version = "3.11"
212
+
name = "httpx"
213
+
version = "0.28.1"
98
214
source = { registry = "https://pypi.org/simple" }
99
-
sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" }
215
+
dependencies = [
216
+
{ name = "anyio" },
217
+
{ name = "certifi" },
218
+
{ name = "httpcore" },
219
+
{ name = "idna" },
220
+
]
221
+
sdist = { url = "https://files.pythonhosted.org/packages/b1/df/48c586a5fe32a0f01324ee087459e112ebb7224f646c0b5023f5e79e9956/httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc", size = 141406, upload-time = "2024-12-06T15:37:23.222Z" }
100
222
wheels = [
101
-
{ url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" },
223
+
{ url = "https://files.pythonhosted.org/packages/2a/39/e50c7c3a983047577ee07d2a9e53faf5a69493943ec3f6a384bdc792deb2/httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad", size = 73517, upload-time = "2024-12-06T15:37:21.509Z" },
102
224
]
103
225
104
226
[[package]]
105
-
name = "iniconfig"
106
-
version = "2.3.0"
227
+
name = "idna"
228
+
version = "3.10"
107
229
source = { registry = "https://pypi.org/simple" }
108
-
sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" }
230
+
sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" }
109
231
wheels = [
110
-
{ url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" },
232
+
{ url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" },
111
233
]
112
234
113
235
[[package]]
114
-
name = "packaging"
115
-
version = "25.0"
236
+
name = "libipld"
237
+
version = "3.0.1"
116
238
source = { registry = "https://pypi.org/simple" }
117
-
sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" }
239
+
sdist = { url = "https://files.pythonhosted.org/packages/d4/ad/b440c64e2d1ee84f2933979175399ff09bd0ba7b1b07c6bc20ba585825cd/libipld-3.0.1.tar.gz", hash = "sha256:2970752de70e5fdcac4646900cdefaa0dca08db9b5d59c40b5496d99e3bffa64", size = 4359070, upload-time = "2025-02-18T11:19:59.924Z" }
118
240
wheels = [
119
-
{ url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" },
241
+
{ url = "https://files.pythonhosted.org/packages/b8/6b/87c3b3222a1ebc9b8654a2ec168d177e85c993a679b698f53f199b367e37/libipld-3.0.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:27313adb70ca9ecfaaa34f1ca6e45ee0569935b7ba9802f78c2f37f7a633a7dd", size = 307914, upload-time = "2025-02-18T11:18:13.449Z" },
242
+
{ url = "https://files.pythonhosted.org/packages/62/fc/9cd90e1bf5e50fa31ced3a9e4eced8b386a509f693d915ff483c320f8556/libipld-3.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bf5a14647350aa6779d634b7dc0f6967296fe52e9ca1d6132e24aa388c77c68e", size = 295778, upload-time = "2025-02-18T11:18:15.223Z" },
243
+
{ url = "https://files.pythonhosted.org/packages/9b/17/c4ee7f38d43d513935179706011aa8fa5ef70d223626477de05ae301f4ae/libipld-3.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d9e619573d500eb4a4ab4a8ef90882305fba43a5a405eb80fcc0afe5d6e9dcd", size = 675489, upload-time = "2025-02-18T11:18:16.808Z" },
244
+
{ url = "https://files.pythonhosted.org/packages/8f/93/f7ba7d2ce896a774634f3a279a0d7900ea2b76e0d93c335727b01c564fd6/libipld-3.0.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:a2fbfaed3fc98c95cd412e61e960cd41633fc880de24327613b0cb0b974d277b", size = 681145, upload-time = "2025-02-18T11:18:18.835Z" },
245
+
{ url = "https://files.pythonhosted.org/packages/92/16/c247088ec2194bfc5b5ed71059c468d1f16987696905fe9b5aaaac336521/libipld-3.0.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b36044476920455a26d30df75728eab069201c42c0af3e3610a30fd62b96ab55", size = 685159, upload-time = "2025-02-18T11:18:20.172Z" },
246
+
{ url = "https://files.pythonhosted.org/packages/e1/f3/3d0442d0bd92f2bbc5bc7259569c2886bd1398a6f090ea30cd19e8c45f00/libipld-3.0.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4586a3442f12065a64a36ae56d80c71d05a87413fbf17bae330c42793c8ecfac", size = 820381, upload-time = "2025-02-18T11:18:22.398Z" },
247
+
{ url = "https://files.pythonhosted.org/packages/c7/a7/63998349b924f0d2225ed194497d24bf088fad34fc02085fd97c4777164c/libipld-3.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d243ca7dea89e1579fd95f95ff612a7b56a980743c25e2a0b1a39cae7b67e55e", size = 681046, upload-time = "2025-02-18T11:18:23.954Z" },
248
+
{ url = "https://files.pythonhosted.org/packages/0b/5a/bdbadafe5cb3c5ae1b4e7fd1517a436d7bda8b63621f3d39af92622d905e/libipld-3.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:1525c07363abb20e8cd416df7ca316ddfc4f592ed2da694b02e0e4a4af1b9418", size = 689931, upload-time = "2025-02-18T11:18:26.868Z" },
249
+
{ url = "https://files.pythonhosted.org/packages/b1/3c/759fcc3f12e41485ef374fab202b7ba84e9f001ca821d3811ff8cd030fdf/libipld-3.0.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:372768df5540867962c3c16fe80976f8b162a9771e8fe1b2175f18dabf23b9ce", size = 849420, upload-time = "2025-02-18T11:18:28.847Z" },
250
+
{ url = "https://files.pythonhosted.org/packages/c4/ac/d697be6d9f20c5176d11193edbac70d55bdeaa70cd110a156ac87aaecaae/libipld-3.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:47bf15f9fc5890ff4807c0c5cb0ff99d625bcea3cd222aaa500d57466da529bd", size = 841270, upload-time = "2025-02-18T11:18:30.588Z" },
251
+
{ url = "https://files.pythonhosted.org/packages/6e/91/5c64cd11e2daee21c968baa6a0669a0f402ead5fc99ad78b92e06a42e4e5/libipld-3.0.1-cp312-cp312-win32.whl", hash = "sha256:989d37ae0cb31380e6b76391e0272342de830adad2821c2de7b925b360fc45f3", size = 182583, upload-time = "2025-02-18T11:18:31.775Z" },
252
+
{ url = "https://files.pythonhosted.org/packages/84/b7/37f88ada4e6fb762a71e93366c320f58995022cf8f67c4ad91d4b9a4568d/libipld-3.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:4557f20d4b8e61ac6c89ab4cea04f3a518a266f3c3d7348cf4cc8ac9b02c89dc", size = 197643, upload-time = "2025-02-18T11:18:32.86Z" },
253
+
{ url = "https://files.pythonhosted.org/packages/3a/23/184f246a3ef1f6fe9775ad27851091a3779c14657e5591f6bdbe910bfe88/libipld-3.0.1-cp312-cp312-win_arm64.whl", hash = "sha256:92ec97dac2e978f09343ebb64b0bb9bed9c294e8a224490552cfc200e9101f5c", size = 176991, upload-time = "2025-02-18T11:18:34.147Z" },
254
+
{ url = "https://files.pythonhosted.org/packages/9d/a2/28c89265a107f9e92e32e308084edd7669e3fe40acb5e21b9e5af231f627/libipld-3.0.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:2cc452e533b7af10a66134aa33a064b40e05fe51fa4a509a969342768543953f", size = 305678, upload-time = "2025-02-18T11:18:36.125Z" },
255
+
{ url = "https://files.pythonhosted.org/packages/05/41/ccb2251240547e0903a55f84bcab0de3b766297f5112c9a3519ce0c66dee/libipld-3.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:6cd8e21c0c7ee87831dc262794637cf6c47b55c55689bc917d2c3d2518221048", size = 295909, upload-time = "2025-02-18T11:18:37.246Z" },
256
+
{ url = "https://files.pythonhosted.org/packages/9b/01/93f4e7f751eaafb6e7ba2a5c2dc859eda743837f3edbd06b712a5e92e63e/libipld-3.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9de6425fc8ba0e9072c77826e66ece2dcb1d161f933cc35f2ad94470d5a304fb", size = 675461, upload-time = "2025-02-18T11:18:38.328Z" },
257
+
{ url = "https://files.pythonhosted.org/packages/5e/a7/d1ff7b19e48f814f4fc908bd0a9160d80539a0128fe9b51285af09f65625/libipld-3.0.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:23c84465181ed30760ba9483e3ae71027573903cfbadf173be9fdd44bd83d8bd", size = 681427, upload-time = "2025-02-18T11:18:39.638Z" },
258
+
{ url = "https://files.pythonhosted.org/packages/e2/42/7c3b45b9186f7f67015b0d717feeaa920ea215c51df675e27419f598ffb2/libipld-3.0.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45052b7f9b6a61a425318ff611b115571965d00e42c2ca66dfd0c56a4f3002b4", size = 684988, upload-time = "2025-02-18T11:18:42.021Z" },
259
+
{ url = "https://files.pythonhosted.org/packages/33/02/dd30f423e8e74ba830dff5bbbd2d7f68c474e5df1d3b56fce5e59bc08a1e/libipld-3.0.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6d183c2543db326d9a4e21819ba5674ae4f1e69dcfd853c654fba471cfbbaa88", size = 820272, upload-time = "2025-02-18T11:18:46.181Z" },
260
+
{ url = "https://files.pythonhosted.org/packages/80/cd/bdd10568306ed1d71d24440e08b526ae69b93405d75a5289e0d54cf7b961/libipld-3.0.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ceb82681b6985e34609636186ac00b51105816d310ed510de1169cd65f903622", size = 680986, upload-time = "2025-02-18T11:18:48.285Z" },
261
+
{ url = "https://files.pythonhosted.org/packages/0a/20/d03eddce8c41f1f928efb37268424e336d97d2aca829bd267b1f12851759/libipld-3.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3c71ffe0b9c182664bac3a2386e6c6580744f5aa46513d0d6823e671ab71d82", size = 689783, upload-time = "2025-02-18T11:18:49.501Z" },
262
+
{ url = "https://files.pythonhosted.org/packages/27/17/fdfcb6d0b0d7120eb3ad9361173cc6d5c24814b6ea2e7b135b3bb8d6920e/libipld-3.0.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:6ed68ff00bb8d63e18bf823eb89ec86e9f30b997c6d152a35ec6c4c8502ea080", size = 849382, upload-time = "2025-02-18T11:18:51.183Z" },
263
+
{ url = "https://files.pythonhosted.org/packages/6c/99/237d618fa6707300a60b8b4b859855e4e34dadb00233dc1e92d911166ae2/libipld-3.0.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:8d517c69b8f29acca27b0ced0ecb78f6e54f70952a35bc8f3060b628069c63ec", size = 841299, upload-time = "2025-02-18T11:18:53.398Z" },
264
+
{ url = "https://files.pythonhosted.org/packages/93/49/32c73fd530fab341bebc4e400657f5c2189a8d4d627bcdeb774eb37dd90f/libipld-3.0.1-cp313-cp313-win32.whl", hash = "sha256:21989622e02a3bd8be16e97c412af4f48b5ddf3b32f9b0da9d7c6b0724d01e91", size = 182567, upload-time = "2025-02-18T11:18:54.635Z" },
265
+
{ url = "https://files.pythonhosted.org/packages/7f/1e/ea73ea525d716ce836367daa212d4d0b1c25a89ffa281c9fee535cb99840/libipld-3.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:da81784d00597a0c9ac0a133ac820aaea60599b077778046dde4726e1a08685c", size = 196204, upload-time = "2025-02-18T11:18:55.706Z" },
266
+
{ url = "https://files.pythonhosted.org/packages/e2/ba/56e9082bdd997c41b3e58d3afb9d40cf08725cbd486f7e334538a41bc2a8/libipld-3.0.1-cp313-cp313-win_arm64.whl", hash = "sha256:d670dea8a76188e2977b5c3d780a6393bb270b0d04976436ce3afbc2cf4da516", size = 177044, upload-time = "2025-02-18T11:18:56.786Z" },
120
267
]
121
268
122
269
[[package]]
123
-
name = "pluggy"
124
-
version = "1.6.0"
270
+
name = "pycparser"
271
+
version = "2.22"
125
272
source = { registry = "https://pypi.org/simple" }
126
-
sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" }
273
+
sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" }
127
274
wheels = [
128
-
{ url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" },
275
+
{ url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" },
129
276
]
130
277
131
278
[[package]]
132
-
name = "pygments"
133
-
version = "2.19.2"
279
+
name = "pydantic"
280
+
version = "2.11.5"
134
281
source = { registry = "https://pypi.org/simple" }
135
-
sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" }
282
+
dependencies = [
283
+
{ name = "annotated-types" },
284
+
{ name = "pydantic-core" },
285
+
{ name = "typing-extensions" },
286
+
{ name = "typing-inspection" },
287
+
]
288
+
sdist = { url = "https://files.pythonhosted.org/packages/f0/86/8ce9040065e8f924d642c58e4a344e33163a07f6b57f836d0d734e0ad3fb/pydantic-2.11.5.tar.gz", hash = "sha256:7f853db3d0ce78ce8bbb148c401c2cdd6431b3473c0cdff2755c7690952a7b7a", size = 787102, upload-time = "2025-05-22T21:18:08.761Z" }
136
289
wheels = [
137
-
{ url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" },
290
+
{ url = "https://files.pythonhosted.org/packages/b5/69/831ed22b38ff9b4b64b66569f0e5b7b97cf3638346eb95a2147fdb49ad5f/pydantic-2.11.5-py3-none-any.whl", hash = "sha256:f9c26ba06f9747749ca1e5c94d6a85cb84254577553c8785576fd38fa64dc0f7", size = 444229, upload-time = "2025-05-22T21:18:06.329Z" },
138
291
]
139
292
140
293
[[package]]
141
-
name = "pytest"
142
-
version = "8.4.2"
294
+
name = "pydantic-core"
295
+
version = "2.33.2"
143
296
source = { registry = "https://pypi.org/simple" }
144
297
dependencies = [
145
-
{ name = "colorama", marker = "sys_platform == 'win32'" },
146
-
{ name = "iniconfig" },
147
-
{ name = "packaging" },
148
-
{ name = "pluggy" },
149
-
{ name = "pygments" },
298
+
{ name = "typing-extensions" },
150
299
]
151
-
sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" }
300
+
sdist = { url = "https://files.pythonhosted.org/packages/ad/88/5f2260bdfae97aabf98f1778d43f69574390ad787afb646292a638c923d4/pydantic_core-2.33.2.tar.gz", hash = "sha256:7cb8bc3605c29176e1b105350d2e6474142d7c1bd1d9327c4a9bdb46bf827acc", size = 435195, upload-time = "2025-04-23T18:33:52.104Z" }
152
301
wheels = [
153
-
{ url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" },
302
+
{ url = "https://files.pythonhosted.org/packages/18/8a/2b41c97f554ec8c71f2a8a5f85cb56a8b0956addfe8b0efb5b3d77e8bdc3/pydantic_core-2.33.2-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:a7ec89dc587667f22b6a0b6579c249fca9026ce7c333fc142ba42411fa243cdc", size = 2009000, upload-time = "2025-04-23T18:31:25.863Z" },
303
+
{ url = "https://files.pythonhosted.org/packages/a1/02/6224312aacb3c8ecbaa959897af57181fb6cf3a3d7917fd44d0f2917e6f2/pydantic_core-2.33.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:3c6db6e52c6d70aa0d00d45cdb9b40f0433b96380071ea80b09277dba021ddf7", size = 1847996, upload-time = "2025-04-23T18:31:27.341Z" },
304
+
{ url = "https://files.pythonhosted.org/packages/d6/46/6dcdf084a523dbe0a0be59d054734b86a981726f221f4562aed313dbcb49/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e61206137cbc65e6d5256e1166f88331d3b6238e082d9f74613b9b765fb9025", size = 1880957, upload-time = "2025-04-23T18:31:28.956Z" },
305
+
{ url = "https://files.pythonhosted.org/packages/ec/6b/1ec2c03837ac00886ba8160ce041ce4e325b41d06a034adbef11339ae422/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:eb8c529b2819c37140eb51b914153063d27ed88e3bdc31b71198a198e921e011", size = 1964199, upload-time = "2025-04-23T18:31:31.025Z" },
306
+
{ url = "https://files.pythonhosted.org/packages/2d/1d/6bf34d6adb9debd9136bd197ca72642203ce9aaaa85cfcbfcf20f9696e83/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c52b02ad8b4e2cf14ca7b3d918f3eb0ee91e63b3167c32591e57c4317e134f8f", size = 2120296, upload-time = "2025-04-23T18:31:32.514Z" },
307
+
{ url = "https://files.pythonhosted.org/packages/e0/94/2bd0aaf5a591e974b32a9f7123f16637776c304471a0ab33cf263cf5591a/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:96081f1605125ba0855dfda83f6f3df5ec90c61195421ba72223de35ccfb2f88", size = 2676109, upload-time = "2025-04-23T18:31:33.958Z" },
308
+
{ url = "https://files.pythonhosted.org/packages/f9/41/4b043778cf9c4285d59742281a769eac371b9e47e35f98ad321349cc5d61/pydantic_core-2.33.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f57a69461af2a5fa6e6bbd7a5f60d3b7e6cebb687f55106933188e79ad155c1", size = 2002028, upload-time = "2025-04-23T18:31:39.095Z" },
309
+
{ url = "https://files.pythonhosted.org/packages/cb/d5/7bb781bf2748ce3d03af04d5c969fa1308880e1dca35a9bd94e1a96a922e/pydantic_core-2.33.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:572c7e6c8bb4774d2ac88929e3d1f12bc45714ae5ee6d9a788a9fb35e60bb04b", size = 2100044, upload-time = "2025-04-23T18:31:41.034Z" },
310
+
{ url = "https://files.pythonhosted.org/packages/fe/36/def5e53e1eb0ad896785702a5bbfd25eed546cdcf4087ad285021a90ed53/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:db4b41f9bd95fbe5acd76d89920336ba96f03e149097365afe1cb092fceb89a1", size = 2058881, upload-time = "2025-04-23T18:31:42.757Z" },
311
+
{ url = "https://files.pythonhosted.org/packages/01/6c/57f8d70b2ee57fc3dc8b9610315949837fa8c11d86927b9bb044f8705419/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:fa854f5cf7e33842a892e5c73f45327760bc7bc516339fda888c75ae60edaeb6", size = 2227034, upload-time = "2025-04-23T18:31:44.304Z" },
312
+
{ url = "https://files.pythonhosted.org/packages/27/b9/9c17f0396a82b3d5cbea4c24d742083422639e7bb1d5bf600e12cb176a13/pydantic_core-2.33.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:5f483cfb75ff703095c59e365360cb73e00185e01aaea067cd19acffd2ab20ea", size = 2234187, upload-time = "2025-04-23T18:31:45.891Z" },
313
+
{ url = "https://files.pythonhosted.org/packages/b0/6a/adf5734ffd52bf86d865093ad70b2ce543415e0e356f6cacabbc0d9ad910/pydantic_core-2.33.2-cp312-cp312-win32.whl", hash = "sha256:9cb1da0f5a471435a7bc7e439b8a728e8b61e59784b2af70d7c169f8dd8ae290", size = 1892628, upload-time = "2025-04-23T18:31:47.819Z" },
314
+
{ url = "https://files.pythonhosted.org/packages/43/e4/5479fecb3606c1368d496a825d8411e126133c41224c1e7238be58b87d7e/pydantic_core-2.33.2-cp312-cp312-win_amd64.whl", hash = "sha256:f941635f2a3d96b2973e867144fde513665c87f13fe0e193c158ac51bfaaa7b2", size = 1955866, upload-time = "2025-04-23T18:31:49.635Z" },
315
+
{ url = "https://files.pythonhosted.org/packages/0d/24/8b11e8b3e2be9dd82df4b11408a67c61bb4dc4f8e11b5b0fc888b38118b5/pydantic_core-2.33.2-cp312-cp312-win_arm64.whl", hash = "sha256:cca3868ddfaccfbc4bfb1d608e2ccaaebe0ae628e1416aeb9c4d88c001bb45ab", size = 1888894, upload-time = "2025-04-23T18:31:51.609Z" },
316
+
{ url = "https://files.pythonhosted.org/packages/46/8c/99040727b41f56616573a28771b1bfa08a3d3fe74d3d513f01251f79f172/pydantic_core-2.33.2-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:1082dd3e2d7109ad8b7da48e1d4710c8d06c253cbc4a27c1cff4fbcaa97a9e3f", size = 2015688, upload-time = "2025-04-23T18:31:53.175Z" },
317
+
{ url = "https://files.pythonhosted.org/packages/3a/cc/5999d1eb705a6cefc31f0b4a90e9f7fc400539b1a1030529700cc1b51838/pydantic_core-2.33.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f517ca031dfc037a9c07e748cefd8d96235088b83b4f4ba8939105d20fa1dcd6", size = 1844808, upload-time = "2025-04-23T18:31:54.79Z" },
318
+
{ url = "https://files.pythonhosted.org/packages/6f/5e/a0a7b8885c98889a18b6e376f344da1ef323d270b44edf8174d6bce4d622/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0a9f2c9dd19656823cb8250b0724ee9c60a82f3cdf68a080979d13092a3b0fef", size = 1885580, upload-time = "2025-04-23T18:31:57.393Z" },
319
+
{ url = "https://files.pythonhosted.org/packages/3b/2a/953581f343c7d11a304581156618c3f592435523dd9d79865903272c256a/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:2b0a451c263b01acebe51895bfb0e1cc842a5c666efe06cdf13846c7418caa9a", size = 1973859, upload-time = "2025-04-23T18:31:59.065Z" },
320
+
{ url = "https://files.pythonhosted.org/packages/e6/55/f1a813904771c03a3f97f676c62cca0c0a4138654107c1b61f19c644868b/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ea40a64d23faa25e62a70ad163571c0b342b8bf66d5fa612ac0dec4f069d916", size = 2120810, upload-time = "2025-04-23T18:32:00.78Z" },
321
+
{ url = "https://files.pythonhosted.org/packages/aa/c3/053389835a996e18853ba107a63caae0b9deb4a276c6b472931ea9ae6e48/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb2d542b4d66f9470e8065c5469ec676978d625a8b7a363f07d9a501a9cb36a", size = 2676498, upload-time = "2025-04-23T18:32:02.418Z" },
322
+
{ url = "https://files.pythonhosted.org/packages/eb/3c/f4abd740877a35abade05e437245b192f9d0ffb48bbbbd708df33d3cda37/pydantic_core-2.33.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fdac5d6ffa1b5a83bca06ffe7583f5576555e6c8b3a91fbd25ea7780f825f7d", size = 2000611, upload-time = "2025-04-23T18:32:04.152Z" },
323
+
{ url = "https://files.pythonhosted.org/packages/59/a7/63ef2fed1837d1121a894d0ce88439fe3e3b3e48c7543b2a4479eb99c2bd/pydantic_core-2.33.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:04a1a413977ab517154eebb2d326da71638271477d6ad87a769102f7c2488c56", size = 2107924, upload-time = "2025-04-23T18:32:06.129Z" },
324
+
{ url = "https://files.pythonhosted.org/packages/04/8f/2551964ef045669801675f1cfc3b0d74147f4901c3ffa42be2ddb1f0efc4/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c8e7af2f4e0194c22b5b37205bfb293d166a7344a5b0d0eaccebc376546d77d5", size = 2063196, upload-time = "2025-04-23T18:32:08.178Z" },
325
+
{ url = "https://files.pythonhosted.org/packages/26/bd/d9602777e77fc6dbb0c7db9ad356e9a985825547dce5ad1d30ee04903918/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:5c92edd15cd58b3c2d34873597a1e20f13094f59cf88068adb18947df5455b4e", size = 2236389, upload-time = "2025-04-23T18:32:10.242Z" },
326
+
{ url = "https://files.pythonhosted.org/packages/42/db/0e950daa7e2230423ab342ae918a794964b053bec24ba8af013fc7c94846/pydantic_core-2.33.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:65132b7b4a1c0beded5e057324b7e16e10910c106d43675d9bd87d4f38dde162", size = 2239223, upload-time = "2025-04-23T18:32:12.382Z" },
327
+
{ url = "https://files.pythonhosted.org/packages/58/4d/4f937099c545a8a17eb52cb67fe0447fd9a373b348ccfa9a87f141eeb00f/pydantic_core-2.33.2-cp313-cp313-win32.whl", hash = "sha256:52fb90784e0a242bb96ec53f42196a17278855b0f31ac7c3cc6f5c1ec4811849", size = 1900473, upload-time = "2025-04-23T18:32:14.034Z" },
328
+
{ url = "https://files.pythonhosted.org/packages/a0/75/4a0a9bac998d78d889def5e4ef2b065acba8cae8c93696906c3a91f310ca/pydantic_core-2.33.2-cp313-cp313-win_amd64.whl", hash = "sha256:c083a3bdd5a93dfe480f1125926afcdbf2917ae714bdb80b36d34318b2bec5d9", size = 1955269, upload-time = "2025-04-23T18:32:15.783Z" },
329
+
{ url = "https://files.pythonhosted.org/packages/f9/86/1beda0576969592f1497b4ce8e7bc8cbdf614c352426271b1b10d5f0aa64/pydantic_core-2.33.2-cp313-cp313-win_arm64.whl", hash = "sha256:e80b087132752f6b3d714f041ccf74403799d3b23a72722ea2e6ba2e892555b9", size = 1893921, upload-time = "2025-04-23T18:32:18.473Z" },
330
+
{ url = "https://files.pythonhosted.org/packages/a4/7d/e09391c2eebeab681df2b74bfe6c43422fffede8dc74187b2b0bf6fd7571/pydantic_core-2.33.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:61c18fba8e5e9db3ab908620af374db0ac1baa69f0f32df4f61ae23f15e586ac", size = 1806162, upload-time = "2025-04-23T18:32:20.188Z" },
331
+
{ url = "https://files.pythonhosted.org/packages/f1/3d/847b6b1fed9f8ed3bb95a9ad04fbd0b212e832d4f0f50ff4d9ee5a9f15cf/pydantic_core-2.33.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:95237e53bb015f67b63c91af7518a62a8660376a6a0db19b89acc77a4d6199f5", size = 1981560, upload-time = "2025-04-23T18:32:22.354Z" },
332
+
{ url = "https://files.pythonhosted.org/packages/6f/9a/e73262f6c6656262b5fdd723ad90f518f579b7bc8622e43a942eec53c938/pydantic_core-2.33.2-cp313-cp313t-win_amd64.whl", hash = "sha256:c2fc0a768ef76c15ab9238afa6da7f69895bb5d1ee83aeea2e3509af4472d0b9", size = 1935777, upload-time = "2025-04-23T18:32:25.088Z" },
154
333
]
155
334
156
335
[[package]]
···
164
343
165
344
[[package]]
166
345
name = "requests"
167
-
version = "2.32.5"
346
+
version = "2.32.3"
168
347
source = { registry = "https://pypi.org/simple" }
169
348
dependencies = [
170
349
{ name = "certifi" },
···
172
351
{ name = "idna" },
173
352
{ name = "urllib3" },
174
353
]
175
-
sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" }
354
+
sdist = { url = "https://files.pythonhosted.org/packages/63/70/2bf7780ad2d390a8d301ad0b550f1581eadbd9a20f896afe06353c2a2913/requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760", size = 131218, upload-time = "2024-05-29T15:37:49.536Z" }
355
+
wheels = [
356
+
{ url = "https://files.pythonhosted.org/packages/f9/9b/335f9764261e915ed497fcdeb11df5dfd6f7bf257d4a6a2a686d80da4d54/requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6", size = 64928, upload-time = "2024-05-29T15:37:47.027Z" },
357
+
]
358
+
359
+
[[package]]
360
+
name = "sniffio"
361
+
version = "1.3.1"
362
+
source = { registry = "https://pypi.org/simple" }
363
+
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
364
+
wheels = [
365
+
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
366
+
]
367
+
368
+
[[package]]
369
+
name = "typing-extensions"
370
+
version = "4.14.0"
371
+
source = { registry = "https://pypi.org/simple" }
372
+
sdist = { url = "https://files.pythonhosted.org/packages/d1/bc/51647cd02527e87d05cb083ccc402f93e441606ff1f01739a62c8ad09ba5/typing_extensions-4.14.0.tar.gz", hash = "sha256:8676b788e32f02ab42d9e7c61324048ae4c6d844a399eebace3d4979d75ceef4", size = 107423, upload-time = "2025-06-02T14:52:11.399Z" }
373
+
wheels = [
374
+
{ url = "https://files.pythonhosted.org/packages/69/e0/552843e0d356fbb5256d21449fa957fa4eff3bbc135a74a691ee70c7c5da/typing_extensions-4.14.0-py3-none-any.whl", hash = "sha256:a1514509136dd0b477638fc68d6a91497af5076466ad0fa6c338e44e359944af", size = 43839, upload-time = "2025-06-02T14:52:10.026Z" },
375
+
]
376
+
377
+
[[package]]
378
+
name = "typing-inspection"
379
+
version = "0.4.1"
380
+
source = { registry = "https://pypi.org/simple" }
381
+
dependencies = [
382
+
{ name = "typing-extensions" },
383
+
]
384
+
sdist = { url = "https://files.pythonhosted.org/packages/f8/b1/0c11f5058406b3af7609f121aaa6b609744687f1d158b3c3a5bf4cc94238/typing_inspection-0.4.1.tar.gz", hash = "sha256:6ae134cc0203c33377d43188d4064e9b357dba58cff3185f22924610e70a9d28", size = 75726, upload-time = "2025-05-21T18:55:23.885Z" }
176
385
wheels = [
177
-
{ url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" },
386
+
{ url = "https://files.pythonhosted.org/packages/17/69/cd203477f944c353c31bade965f880aa1061fd6bf05ded0726ca845b6ff7/typing_inspection-0.4.1-py3-none-any.whl", hash = "sha256:389055682238f53b04f7badcb49b989835495a96700ced5dab2d8feae4b26f51", size = 14552, upload-time = "2025-05-21T18:55:22.152Z" },
178
387
]
179
388
180
389
[[package]]
181
390
name = "urllib3"
182
-
version = "2.5.0"
391
+
version = "2.4.0"
183
392
source = { registry = "https://pypi.org/simple" }
184
-
sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" }
393
+
sdist = { url = "https://files.pythonhosted.org/packages/8a/78/16493d9c386d8e60e442a35feac5e00f0913c0f4b7c217c11e8ec2ff53e0/urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466", size = 390672, upload-time = "2025-04-10T15:23:39.232Z" }
185
394
wheels = [
186
-
{ url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" },
395
+
{ url = "https://files.pythonhosted.org/packages/6b/11/cc635220681e93a0183390e26485430ca2c7b5f9d33b15c74c2861cb8091/urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813", size = 128680, upload-time = "2025-04-10T15:23:37.377Z" },
187
396
]
188
397
189
398
[[package]]
190
399
name = "websockets"
191
-
version = "15.0.1"
400
+
version = "13.1"
192
401
source = { registry = "https://pypi.org/simple" }
193
-
sdist = { url = "https://files.pythonhosted.org/packages/21/e6/26d09fab466b7ca9c7737474c52be4f76a40301b08362eb2dbc19dcc16c1/websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee", size = 177016, upload-time = "2025-03-05T20:03:41.606Z" }
402
+
sdist = { url = "https://files.pythonhosted.org/packages/e2/73/9223dbc7be3dcaf2a7bbf756c351ec8da04b1fa573edaf545b95f6b0c7fd/websockets-13.1.tar.gz", hash = "sha256:a3b3366087c1bc0a2795111edcadddb8b3b59509d5db5d7ea3fdd69f954a8878", size = 158549, upload-time = "2024-09-21T17:34:21.54Z" }
194
403
wheels = [
195
-
{ url = "https://files.pythonhosted.org/packages/51/6b/4545a0d843594f5d0771e86463606a3988b5a09ca5123136f8a76580dd63/websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3", size = 175437, upload-time = "2025-03-05T20:02:16.706Z" },
196
-
{ url = "https://files.pythonhosted.org/packages/f4/71/809a0f5f6a06522af902e0f2ea2757f71ead94610010cf570ab5c98e99ed/websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665", size = 173096, upload-time = "2025-03-05T20:02:18.832Z" },
197
-
{ url = "https://files.pythonhosted.org/packages/3d/69/1a681dd6f02180916f116894181eab8b2e25b31e484c5d0eae637ec01f7c/websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2", size = 173332, upload-time = "2025-03-05T20:02:20.187Z" },
198
-
{ url = "https://files.pythonhosted.org/packages/a6/02/0073b3952f5bce97eafbb35757f8d0d54812b6174ed8dd952aa08429bcc3/websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215", size = 183152, upload-time = "2025-03-05T20:02:22.286Z" },
199
-
{ url = "https://files.pythonhosted.org/packages/74/45/c205c8480eafd114b428284840da0b1be9ffd0e4f87338dc95dc6ff961a1/websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5", size = 182096, upload-time = "2025-03-05T20:02:24.368Z" },
200
-
{ url = "https://files.pythonhosted.org/packages/14/8f/aa61f528fba38578ec553c145857a181384c72b98156f858ca5c8e82d9d3/websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65", size = 182523, upload-time = "2025-03-05T20:02:25.669Z" },
201
-
{ url = "https://files.pythonhosted.org/packages/ec/6d/0267396610add5bc0d0d3e77f546d4cd287200804fe02323797de77dbce9/websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe", size = 182790, upload-time = "2025-03-05T20:02:26.99Z" },
202
-
{ url = "https://files.pythonhosted.org/packages/02/05/c68c5adbf679cf610ae2f74a9b871ae84564462955d991178f95a1ddb7dd/websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4", size = 182165, upload-time = "2025-03-05T20:02:30.291Z" },
203
-
{ url = "https://files.pythonhosted.org/packages/29/93/bb672df7b2f5faac89761cb5fa34f5cec45a4026c383a4b5761c6cea5c16/websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597", size = 182160, upload-time = "2025-03-05T20:02:31.634Z" },
204
-
{ url = "https://files.pythonhosted.org/packages/ff/83/de1f7709376dc3ca9b7eeb4b9a07b4526b14876b6d372a4dc62312bebee0/websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9", size = 176395, upload-time = "2025-03-05T20:02:33.017Z" },
205
-
{ url = "https://files.pythonhosted.org/packages/7d/71/abf2ebc3bbfa40f391ce1428c7168fb20582d0ff57019b69ea20fa698043/websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7", size = 176841, upload-time = "2025-03-05T20:02:34.498Z" },
206
-
{ url = "https://files.pythonhosted.org/packages/cb/9f/51f0cf64471a9d2b4d0fc6c534f323b664e7095640c34562f5182e5a7195/websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931", size = 175440, upload-time = "2025-03-05T20:02:36.695Z" },
207
-
{ url = "https://files.pythonhosted.org/packages/8a/05/aa116ec9943c718905997412c5989f7ed671bc0188ee2ba89520e8765d7b/websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675", size = 173098, upload-time = "2025-03-05T20:02:37.985Z" },
208
-
{ url = "https://files.pythonhosted.org/packages/ff/0b/33cef55ff24f2d92924923c99926dcce78e7bd922d649467f0eda8368923/websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151", size = 173329, upload-time = "2025-03-05T20:02:39.298Z" },
209
-
{ url = "https://files.pythonhosted.org/packages/31/1d/063b25dcc01faa8fada1469bdf769de3768b7044eac9d41f734fd7b6ad6d/websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22", size = 183111, upload-time = "2025-03-05T20:02:40.595Z" },
210
-
{ url = "https://files.pythonhosted.org/packages/93/53/9a87ee494a51bf63e4ec9241c1ccc4f7c2f45fff85d5bde2ff74fcb68b9e/websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f", size = 182054, upload-time = "2025-03-05T20:02:41.926Z" },
211
-
{ url = "https://files.pythonhosted.org/packages/ff/b2/83a6ddf56cdcbad4e3d841fcc55d6ba7d19aeb89c50f24dd7e859ec0805f/websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8", size = 182496, upload-time = "2025-03-05T20:02:43.304Z" },
212
-
{ url = "https://files.pythonhosted.org/packages/98/41/e7038944ed0abf34c45aa4635ba28136f06052e08fc2168520bb8b25149f/websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375", size = 182829, upload-time = "2025-03-05T20:02:48.812Z" },
213
-
{ url = "https://files.pythonhosted.org/packages/e0/17/de15b6158680c7623c6ef0db361da965ab25d813ae54fcfeae2e5b9ef910/websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d", size = 182217, upload-time = "2025-03-05T20:02:50.14Z" },
214
-
{ url = "https://files.pythonhosted.org/packages/33/2b/1f168cb6041853eef0362fb9554c3824367c5560cbdaad89ac40f8c2edfc/websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4", size = 182195, upload-time = "2025-03-05T20:02:51.561Z" },
215
-
{ url = "https://files.pythonhosted.org/packages/86/eb/20b6cdf273913d0ad05a6a14aed4b9a85591c18a987a3d47f20fa13dcc47/websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa", size = 176393, upload-time = "2025-03-05T20:02:53.814Z" },
216
-
{ url = "https://files.pythonhosted.org/packages/1b/6c/c65773d6cab416a64d191d6ee8a8b1c68a09970ea6909d16965d26bfed1e/websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561", size = 176837, upload-time = "2025-03-05T20:02:55.237Z" },
217
-
{ url = "https://files.pythonhosted.org/packages/fa/a8/5b41e0da817d64113292ab1f8247140aac61cbf6cfd085d6a0fa77f4984f/websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f", size = 169743, upload-time = "2025-03-05T20:03:39.41Z" },
+    { url = "https://files.pythonhosted.org/packages/df/46/c426282f543b3c0296cf964aa5a7bb17e984f58dde23460c3d39b3148fcf/websockets-13.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:9d75baf00138f80b48f1eac72ad1535aac0b6461265a0bcad391fc5aba875cfc", size = 157821, upload-time = "2024-09-21T17:32:56.442Z" },
+    { url = "https://files.pythonhosted.org/packages/aa/85/22529867010baac258da7c45848f9415e6cf37fef00a43856627806ffd04/websockets-13.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:9b6f347deb3dcfbfde1c20baa21c2ac0751afaa73e64e5b693bb2b848efeaa49", size = 155480, upload-time = "2024-09-21T17:32:57.698Z" },
+    { url = "https://files.pythonhosted.org/packages/29/2c/bdb339bfbde0119a6e84af43ebf6275278698a2241c2719afc0d8b0bdbf2/websockets-13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:de58647e3f9c42f13f90ac7e5f58900c80a39019848c5547bc691693098ae1bd", size = 155715, upload-time = "2024-09-21T17:32:59.429Z" },
+    { url = "https://files.pythonhosted.org/packages/9f/d0/8612029ea04c5c22bf7af2fd3d63876c4eaeef9b97e86c11972a43aa0e6c/websockets-13.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1b54689e38d1279a51d11e3467dd2f3a50f5f2e879012ce8f2d6943f00e83f0", size = 165647, upload-time = "2024-09-21T17:33:00.495Z" },
+    { url = "https://files.pythonhosted.org/packages/56/04/1681ed516fa19ca9083f26d3f3a302257e0911ba75009533ed60fbb7b8d1/websockets-13.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf1781ef73c073e6b0f90af841aaf98501f975d306bbf6221683dd594ccc52b6", size = 164592, upload-time = "2024-09-21T17:33:02.223Z" },
+    { url = "https://files.pythonhosted.org/packages/38/6f/a96417a49c0ed132bb6087e8e39a37db851c70974f5c724a4b2a70066996/websockets-13.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d23b88b9388ed85c6faf0e74d8dec4f4d3baf3ecf20a65a47b836d56260d4b9", size = 165012, upload-time = "2024-09-21T17:33:03.288Z" },
+    { url = "https://files.pythonhosted.org/packages/40/8b/fccf294919a1b37d190e86042e1a907b8f66cff2b61e9befdbce03783e25/websockets-13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3c78383585f47ccb0fcf186dcb8a43f5438bd7d8f47d69e0b56f71bf431a0a68", size = 165311, upload-time = "2024-09-21T17:33:04.728Z" },
+    { url = "https://files.pythonhosted.org/packages/c1/61/f8615cf7ce5fe538476ab6b4defff52beb7262ff8a73d5ef386322d9761d/websockets-13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d6d300f8ec35c24025ceb9b9019ae9040c1ab2f01cddc2bcc0b518af31c75c14", size = 164692, upload-time = "2024-09-21T17:33:05.829Z" },
+    { url = "https://files.pythonhosted.org/packages/5c/f1/a29dd6046d3a722d26f182b783a7997d25298873a14028c4760347974ea3/websockets-13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a9dcaf8b0cc72a392760bb8755922c03e17a5a54e08cca58e8b74f6902b433cf", size = 164686, upload-time = "2024-09-21T17:33:06.823Z" },
+    { url = "https://files.pythonhosted.org/packages/0f/99/ab1cdb282f7e595391226f03f9b498f52109d25a2ba03832e21614967dfa/websockets-13.1-cp312-cp312-win32.whl", hash = "sha256:2f85cf4f2a1ba8f602298a853cec8526c2ca42a9a4b947ec236eaedb8f2dc80c", size = 158712, upload-time = "2024-09-21T17:33:07.877Z" },
+    { url = "https://files.pythonhosted.org/packages/46/93/e19160db48b5581feac8468330aa11b7292880a94a37d7030478596cc14e/websockets-13.1-cp312-cp312-win_amd64.whl", hash = "sha256:38377f8b0cdeee97c552d20cf1865695fcd56aba155ad1b4ca8779a5b6ef4ac3", size = 159145, upload-time = "2024-09-21T17:33:09.202Z" },
+    { url = "https://files.pythonhosted.org/packages/51/20/2b99ca918e1cbd33c53db2cace5f0c0cd8296fc77558e1908799c712e1cd/websockets-13.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:a9ab1e71d3d2e54a0aa646ab6d4eebfaa5f416fe78dfe4da2839525dc5d765c6", size = 157828, upload-time = "2024-09-21T17:33:10.987Z" },
+    { url = "https://files.pythonhosted.org/packages/b8/47/0932a71d3d9c0e9483174f60713c84cee58d62839a143f21a2bcdbd2d205/websockets-13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b9d7439d7fab4dce00570bb906875734df13d9faa4b48e261c440a5fec6d9708", size = 155487, upload-time = "2024-09-21T17:33:12.153Z" },
+    { url = "https://files.pythonhosted.org/packages/a9/60/f1711eb59ac7a6c5e98e5637fef5302f45b6f76a2c9d64fd83bbb341377a/websockets-13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:327b74e915cf13c5931334c61e1a41040e365d380f812513a255aa804b183418", size = 155721, upload-time = "2024-09-21T17:33:13.909Z" },
+    { url = "https://files.pythonhosted.org/packages/6a/e6/ba9a8db7f9d9b0e5f829cf626ff32677f39824968317223605a6b419d445/websockets-13.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:325b1ccdbf5e5725fdcb1b0e9ad4d2545056479d0eee392c291c1bf76206435a", size = 165609, upload-time = "2024-09-21T17:33:14.967Z" },
+    { url = "https://files.pythonhosted.org/packages/c1/22/4ec80f1b9c27a0aebd84ccd857252eda8418ab9681eb571b37ca4c5e1305/websockets-13.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:346bee67a65f189e0e33f520f253d5147ab76ae42493804319b5716e46dddf0f", size = 164556, upload-time = "2024-09-21T17:33:17.113Z" },
+    { url = "https://files.pythonhosted.org/packages/27/ac/35f423cb6bb15600438db80755609d27eda36d4c0b3c9d745ea12766c45e/websockets-13.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:91a0fa841646320ec0d3accdff5b757b06e2e5c86ba32af2e0815c96c7a603c5", size = 164993, upload-time = "2024-09-21T17:33:18.168Z" },
+    { url = "https://files.pythonhosted.org/packages/31/4e/98db4fd267f8be9e52e86b6ee4e9aa7c42b83452ea0ea0672f176224b977/websockets-13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:18503d2c5f3943e93819238bf20df71982d193f73dcecd26c94514f417f6b135", size = 165360, upload-time = "2024-09-21T17:33:19.233Z" },
+    { url = "https://files.pythonhosted.org/packages/3f/15/3f0de7cda70ffc94b7e7024544072bc5b26e2c1eb36545291abb755d8cdb/websockets-13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9cd1af7e18e5221d2878378fbc287a14cd527fdd5939ed56a18df8a31136bb2", size = 164745, upload-time = "2024-09-21T17:33:20.361Z" },
+    { url = "https://files.pythonhosted.org/packages/a1/6e/66b6b756aebbd680b934c8bdbb6dcb9ce45aad72cde5f8a7208dbb00dd36/websockets-13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:70c5be9f416aa72aab7a2a76c90ae0a4fe2755c1816c153c1a2bcc3333ce4ce6", size = 164732, upload-time = "2024-09-21T17:33:23.103Z" },
+    { url = "https://files.pythonhosted.org/packages/35/c6/12e3aab52c11aeb289e3dbbc05929e7a9d90d7a9173958477d3ef4f8ce2d/websockets-13.1-cp313-cp313-win32.whl", hash = "sha256:624459daabeb310d3815b276c1adef475b3e6804abaf2d9d2c061c319f7f187d", size = 158709, upload-time = "2024-09-21T17:33:24.196Z" },
+    { url = "https://files.pythonhosted.org/packages/41/d8/63d6194aae711d7263df4498200c690a9c39fb437ede10f3e157a6343e0d/websockets-13.1-cp313-cp313-win_amd64.whl", hash = "sha256:c518e84bb59c2baae725accd355c8dc517b4a3ed8db88b4bc93c78dae2974bf2", size = 159144, upload-time = "2024-09-21T17:33:25.96Z" },
+    { url = "https://files.pythonhosted.org/packages/56/27/96a5cd2626d11c8280656c6c71d8ab50fe006490ef9971ccd154e0c42cd2/websockets-13.1-py3-none-any.whl", hash = "sha256:a9a396a6ad26130cdae92ae10c36af09d9bfe6cafe69670fd3b6da9b07b4044f", size = 152134, upload-time = "2024-09-21T17:34:19.904Z" },
 ]
 
 [[package]]
 name = "xpost"
-version = "0.1.0"
+version = "0.0.3"
 source = { virtual = "." }
 dependencies = [
-    { name = "dnspython" },
-    { name = "grapheme" },
+    { name = "atproto" },
+    { name = "click" },
     { name = "python-magic" },
     { name = "requests" },
     { name = "websockets" },
 ]
 
-[package.dev-dependencies]
-dev = [
-    { name = "pytest" },
-]
-
 [package.metadata]
 requires-dist = [
-    { name = "dnspython", specifier = ">=2.8.0" },
-    { name = "grapheme", specifier = ">=0.6.0" },
+    { name = "atproto", specifier = ">=0.0.61" },
+    { name = "click", specifier = ">=8.2.1" },
     { name = "python-magic", specifier = ">=0.4.27" },
-    { name = "requests", specifier = ">=2.32.5" },
-    { name = "websockets", specifier = ">=15.0.1" },
+    { name = "requests", specifier = ">=2.32.3" },
+    { name = "websockets", specifier = ">=13.1" },
 ]
-
-[package.metadata.requires-dev]
-dev = [{ name = "pytest", specifier = ">=8.4.2" }]