Compare commits: plugin-fil...master (292 commits)
Commits in the comparison (abbreviated SHA1 only; the author and date columns are empty in this listing):

b1f2d16af0 472fb9f6ac 65be63fdd2 c218c8cf61 b8714cc6b9 49adb9b441 09a0efbf19 861d81d2a6
91f214819a 299d8f68c3 a7f31f6175 4f68e20ff7 7759643e93 2c60342cc6 a62f064e1c 3f2887d67f
4184280d4e 0c72e6fb1e 202c2836b2 75879cfb93 923125f793 b619d3ad56 dd69c337d2 92736baefd
8f40a0b292 61f154876d ed046bcbfe a4253eceb2 96d4e434a8 2c36e8265b 36829e7d0d b4e8e5bfbb
bf8ae9eb5a 184d6b8eed 28b0412c8c c2648be1e3 aaf32d8820 c9282b93f4 719c9c5dd0 d4bb502def
def923d444 7156594858 e73869bb19 c0c7b58a3f 955fb2723e 41cbf6d788 f0e2bb3d62 bf3a3b65d8
f87e4ca7b9 ada4978879 e99a13a391 6fd8f7ed00 535b6672a6 a46edac3b7 7ec58c6347 e6242735d3
55ffe54fa0 db40201463 6e7f1a8710 d2cef271c1 2a83376e9b 8f5b9e3802 a21b106c71 6f20151a89
cab88d153b 7904888233 fbacad8676 d574f84079 33d7892e13 1fe53b4c56 305faa0583 bfdd52502a
30e67111b1 75574c267b 4f4d7bc342 47d7e2b483 47d499dac8 e0daeeafc6 6ec653c69f f74a67dd79
0663b680ab cc5f9141fc d4face0150 5baab99957 982a20beb0 67e1921503 744d370f0b 5201cb0316
9b37b99dd7 1b298c0dbb ca7c4df349 98c33e1469 8474cc7d6e 818362c820 1932ff9995 cecda7c32b
800114b837 55b7730b9e 106905fd00 7532f4523a e9e325a871 484749c08c 0aef57b5d6 59f51f8705
1aaa8f2fb6 18fa1b29a7 acf24a6fd5 93352b6e62 6e14cbf5dc 32688372fe 4685c492a9 adc892aa0d
4d8e1475e6 4b234e4d34 29b4a3c892 c4f9a3bdf5 fe8658ebc1 91e7d80b29 58fedc7e8c 0779b38b7a
a14cbf9381 54a0cdbaf2 8c6628470e 37c492cbf3 21ed971d2f 068e268c63 6257979e7c a2e6d06cdc
7679a0e97c 6f57c88e4a 339971731e ccb310b6d8 21fe57e87f 05e5bbfb10 3e8e034a5a a1a00ff655
26c1d6571c 7e06eadfd0 e8862ed1a3 df2f2a896a 33bc777bfd d87158be50 4fafdee3a8 66d227ecdb
6d4c2e174f aef1a976bf c179fd3aeb 3f46dfdfd9 f78da96d63 c9677731e3 0efb798fec ca7a980081
f2bae18c7a c8ae7cd9f5 d4a5de1d4f 8b1685a9ea fc516d78b7 a699acc049 e1a1f7b65e 14d410739c
aad5a9d18d 3585ed9456 ae7e9693fc b8ef048a3a 47969e5661 80c8aba8f6 2800e967b8 85e5ea401c
8c3e3a3255 61711e8329 7c9668d8bc c685eb5e08 84152b1c28 92b3481828 274b5465f6 026fdacb59
47a8db6b60 adf28312df 0a47d1929a ebd4ae9f55 99ec961ba3 d619e50973 188c937f60 26c2b6bae5
6db417f95c 172a3333af 8ecd2abba0 3570d7185d 35c9592f55 6efce38ce1 4b4f8571d2 643d122af0
235ce89df1 6e66d0ea42 cb6cb6d372 0dfc2ccd5b 3a5f2ede91 6487a7a8a5 8f806a2594 b7f6c08b2b
ac2f234d4e dcd2c3b5de a685448887 b2018d9ae9 543f8dbf78 92a14f20f6 e26a2abf58 2f7c20dffd
e5ec9b179e dc46b81812 13aaedfad9 bc416d71c2 a078bdd120 60d163f6c5 639fcda5d8 6d04058be2
368928f948 f0a3fa354d e7606751eb c55d05e517 7688083792 cfa69a89ce f4d85bae2b f2a2c0f2bf
cfa831c463 ba4fc26ea7 e3aba7f26d 6d8a05feaf 965b3c428a 76ae71b133 c22e8e0f63 ef30e59dc5
c145234c4a 8109726444 9d3a4b16bd c614f8c8ca bc166e7975 4225b55fe3 7015c6711c ba74044e76
69d7a4341b 4e767a10e4 03a1fdaaf5 09108f6a73 9578883bbb 37eeb73c47 f52f8988f4 a70aa5cd0a
14efaf13df 1692bf7597 512a680678 3836ff59bf 346b1c9dac 6960382aeb 341b3db99e 98eeb6a808
6b5dedb29f 4b88971b3b 593f2ae1d8 6e075c88fa 79fe991daf 1214c54c35 2881344489 8409b2a5bb
92bfe37436 aac1f267d7 6a74a360ca 798f5da5de 4aada15bbe c4aee0d1eb 4c2d198387 d2afc7cee4
946c346481 155526bc14 f36827f9cc 265307a870 ad93d8347e 9a777ee451 293b44729c aaf97b4f94
ec58900cf2 fd379b0108 342ab32f3a 3621ddc190 d874f43df9 f65f4c540b 7f64d21e1e 6b586f0b82
3aadf0c2a9 3b603a2180 9d587e32f2 3c7b4fc09c
.dockerignore (new file, +5)
@@ -0,0 +1,5 @@
.editorconfig
.codeclimate.yml
*.png
.venv
maubot/management/frontend/node_modules
@@ -14,3 +14,6 @@ indent_size = 2
 
 [spec.yaml]
 indent_size = 2
+
+[CHANGELOG.md]
+max_line_length = 80
.github/workflows/python-lint.yml (new file, +26)
@@ -0,0 +1,26 @@
name: Python lint

on: [push, pull_request]

jobs:
  lint:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: "3.12"
      - uses: isort/isort-action@master
        with:
          sortPaths: "./maubot"
      - uses: psf/black@stable
        with:
          src: "./maubot"
          version: "24.2.0"
      - name: pre-commit
        run: |
          pip install pre-commit
          pre-commit run -av trailing-whitespace
          pre-commit run -av end-of-file-fixer
          pre-commit run -av check-yaml
          pre-commit run -av check-added-large-files
.gitignore (5 changes)
@@ -7,10 +7,13 @@ pip-selfcheck.json
 *.pyc
 __pycache__
 
-*.db
+*.db*
+*.log
 /*.yaml
 !example-config.yaml
+!.pre-commit-config.yaml
 
+/start
 logs/
 plugins/
 trash/
.gitlab-ci-plugin.yml (new file, +29)
@@ -0,0 +1,29 @@
image: dock.mau.dev/maubot/maubot

stages:
- build

variables:
  PYTHONPATH: /opt/maubot

build:
  stage: build
  except:
  - tags
  script:
  - python3 -m maubot.cli build -o xyz.maubot.$CI_PROJECT_NAME-$CI_COMMIT_REF_NAME-$CI_COMMIT_SHORT_SHA.mbp
  artifacts:
    paths:
    - "*.mbp"
    expire_in: 365 days

build tags:
  stage: build
  only:
  - tags
  script:
  - python3 -m maubot.cli build -o xyz.maubot.$CI_PROJECT_NAME-$CI_COMMIT_TAG.mbp
  artifacts:
    paths:
    - "*.mbp"
    expire_in: never
.gitlab-ci.yml (110 changes)
@@ -1,67 +1,75 @@
 image: docker:stable
 
 stages:
+- build frontend
 - build
-- push
+- manifest
 
 default:
   before_script:
   - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
 
-build:
-  stage: build
+build frontend:
+  image: node:20-alpine
+  stage: build frontend
+  before_script: []
+  variables:
+    NODE_ENV: "production"
+  cache:
+    paths:
+    - maubot/management/frontend/node_modules
   script:
+  - cd maubot/management/frontend
+  - yarn --prod
+  - yarn build
+  - mv build ../../../frontend
+  artifacts:
+    paths:
+    - frontend
+    expire_in: 1 hour
+
+build amd64:
+  stage: build
+  tags:
+  - amd64
+  script:
+  - echo maubot/management/frontend >> .dockerignore
   - docker pull $CI_REGISTRY_IMAGE:latest || true
-  - docker build --pull --cache-from $CI_REGISTRY_IMAGE:latest --tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA .
-  - docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA
+  - docker build --pull --cache-from $CI_REGISTRY_IMAGE:latest --tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-amd64 . -f Dockerfile.ci
+  - docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-amd64
+  - docker rmi $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-amd64
 
-push latest:
-  stage: push
-  only:
-  - master
-  variables:
-    GIT_STRATEGY: none
-  script:
-  - docker pull $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA
-  - docker tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA $CI_REGISTRY_IMAGE:latest
-  - docker push $CI_REGISTRY_IMAGE:latest
-
-push tag:
-  stage: push
-  variables:
-    GIT_STRATEGY: none
-  except:
-  - master
-  script:
-  - docker pull $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA
-  - docker tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME
-  - docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME
-
-build standalone:
+build arm64:
   stage: build
+  tags:
+  - arm64
+  script:
+  - echo maubot/management/frontend >> .dockerignore
+  - docker pull $CI_REGISTRY_IMAGE:latest || true
+  - docker build --pull --cache-from $CI_REGISTRY_IMAGE:latest --tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-arm64 . -f Dockerfile.ci
+  - docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-arm64
+  - docker rmi $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-arm64
+
+manifest:
+  stage: manifest
+  before_script:
+  - "mkdir -p $HOME/.docker && echo '{\"experimental\": \"enabled\"}' > $HOME/.docker/config.json"
+  - docker login -u $CI_REGISTRY_USER -p $CI_REGISTRY_PASSWORD $CI_REGISTRY
+  script:
+  - docker pull $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-amd64
+  - docker pull $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-arm64
+  - if [ "$CI_COMMIT_BRANCH" = "master" ]; then docker manifest create $CI_REGISTRY_IMAGE:latest $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-amd64 $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-arm64 && docker manifest push $CI_REGISTRY_IMAGE:latest; fi
+  - if [ "$CI_COMMIT_BRANCH" != "master" ]; then docker manifest create $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-amd64 $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-arm64 && docker manifest push $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME; fi
+  - docker rmi $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-amd64 $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-arm64
+
+build standalone amd64:
+  stage: build
+  tags:
+  - amd64
   script:
   - docker pull $CI_REGISTRY_IMAGE:standalone || true
   - docker build --pull --cache-from $CI_REGISTRY_IMAGE:standalone --tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-standalone . -f maubot/standalone/Dockerfile
-  - docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-standalone
+  - if [ "$CI_COMMIT_BRANCH" = "master" ]; then docker tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-standalone $CI_REGISTRY_IMAGE:standalone && docker push $CI_REGISTRY_IMAGE:standalone; fi
+  - if [ "$CI_COMMIT_BRANCH" != "master" ]; then docker tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-standalone $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME-standalone && docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME-standalone; fi
+  - docker rmi $CI_REGISTRY_IMAGE:standalone $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME-standalone $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-standalone || true
-
-push latest standalone:
-  stage: push
-  only:
-  - master
-  variables:
-    GIT_STRATEGY: none
-  script:
-  - docker pull $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-standalone
-  - docker tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-standalone $CI_REGISTRY_IMAGE:standalone
-  - docker push $CI_REGISTRY_IMAGE:standalone
-
-push tag standalone:
-  stage: push
-  variables:
-    GIT_STRATEGY: none
-  except:
-  - master
-  script:
-  - docker pull $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-standalone
-  - docker tag $CI_REGISTRY_IMAGE:$CI_COMMIT_SHA-standalone $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME-standalone
-  - docker push $CI_REGISTRY_IMAGE:$CI_COMMIT_REF_NAME-standalone
.pre-commit-config.yaml (new file, +20)
@@ -0,0 +1,20 @@
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.5.0
    hooks:
      - id: trailing-whitespace
        exclude_types: [markdown]
      - id: end-of-file-fixer
      - id: check-yaml
      - id: check-added-large-files
  - repo: https://github.com/psf/black
    rev: 24.2.0
    hooks:
      - id: black
        language_version: python3
        files: ^maubot/.*\.pyi?$
  - repo: https://github.com/PyCQA/isort
    rev: 5.13.2
    hooks:
      - id: isort
        files: ^maubot/.*\.pyi?$
CHANGELOG.md (new file, +137)
@@ -0,0 +1,137 @@
# v0.5.0 (2024-08-24)

* Dropped Python 3.9 support.
* Updated Docker image to Alpine 3.20.
* Updated mautrix-python to 0.20.6 to support authenticated media.
* Removed hard dependency on SQLAlchemy.
* Fixed `main_class` to default to being loaded from the last module instead of
  the first if a module name is not explicitly specified.
  * This was already the [documented behavior](https://docs.mau.fi/maubot/dev/reference/plugin-metadata.html),
    and loading from the first module doesn't make sense due to import order.
* Added simple scheduler utility for running background tasks periodically or
  after a certain delay.
* Added testing framework for plugins (thanks to [@abompard] in [#225]).
* Changed `mbc build` to ignore directories declared in `modules` that are
  missing an `__init__.py` file.
  * Importing the modules at runtime would fail and break the plugin.
    To include non-code resources outside modules in the mbp archive,
    use `extra_files` instead.

[#225]: https://github.com/maubot/maubot/issues/225
[@abompard]: https://github.com/abompard

# v0.4.2 (2023-09-20)

* Updated Pillow to 10.0.1.
* Updated Docker image to Alpine 3.18.
* Added logging for errors for /whoami errors when adding new bot accounts.
* Added support for using appservice tokens (including appservice encryption)
  in standalone mode.

# v0.4.1 (2023-03-15)

* Added `in_thread` parameter to `evt.reply()` and `evt.respond()`.
  * By default, responses will go to the thread if the command is in a thread.
  * By setting the flag to `True` or `False`, the plugin can force the response
    to either be or not be in a thread.
* Fixed static files like the frontend app manifest not being served correctly.
* Fixed `self.loader.meta` not being available to plugins in standalone mode.
* Updated to mautrix-python v0.19.6.

# v0.4.0 (2023-01-29)

* Dropped support for using a custom maubot API base path.
  * The public URL can still have a path prefix, e.g. when using a reverse
    proxy. Both the web interface and `mbc` CLI tool should work fine with
    custom prefixes.
* Added `evt.redact()` as a shortcut for `self.client.redact(evt.room_id, evt.event_id)`.
* Fixed `mbc logs` command not working on Python 3.8+.
* Fixed saving plugin configs (broke in v0.3.0).
* Fixed SSO login using the wrong API path (probably broke in v0.3.0).
* Stopped using `cd` in the docker image's `mbc` wrapper to enable using
  path-dependent commands like `mbc build` by mounting a directory.
* Updated Docker image to Alpine 3.17.

# v0.3.1 (2022-03-29)

* Added encryption dependencies to standalone dockerfile.
* Fixed running without encryption dependencies installed.
* Removed unnecessary imports that broke on SQLAlchemy 1.4+.
* Removed unused alembic dependency.

# v0.3.0 (2022-03-28)

* Dropped Python 3.7 support.
* Switched main maubot database to asyncpg/aiosqlite.
  * Using the same SQLite database for crypto is now safe again.
* Added support for asyncpg/aiosqlite for plugin databases.
  * There are some [basic docs](https://docs.mau.fi/maubot/dev/database/index.html)
    and [a simple example](./examples/database) for the new system.
  * The old SQLAlchemy system is now deprecated, but will be preserved for
    backwards-compatibility until most plugins have updated.
* Started enforcing minimum maubot version in plugins.
  * Trying to upload a plugin where the specified version is higher than the
    running maubot version will fail.
* Fixed bug where uploading a plugin twice, deleting it and trying to upload
  again would fail.
* Updated Docker image to Alpine 3.15.
* Formatted all code using [black](https://github.com/psf/black)
  and [isort](https://github.com/PyCQA/isort).

# v0.2.1 (2021-11-22)

Docker-only release: added automatic moving of plugin databases from
`/data/plugins/*.db` to `/data/dbs`

# v0.2.0 (2021-11-20)

* Moved plugin databases from `/data/plugins` to `/data/dbs` in the docker image.
  * v0.2.0 was missing the automatic migration of databases, it was added in v0.2.1.
  * If you were using a custom path, you'll have to mount it at `/data/dbs` or
    move the databases yourself.
* Removed support for pickle crypto store and added support for SQLite crypto store.
  * **If you were previously using the dangerous pickle store for e2ee, you'll
    have to re-login with the bots (which can now be done conveniently with
    `mbc auth --update-client`).**
* Added SSO support to `mbc auth`.
* Added support for setting device ID for e2ee using the web interface.
* Added e2ee fingerprint field to the web interface.
* Added `--update-client` flag to store access token inside maubot instead of
  returning it in `mbc auth`.
  * This will also automatically store the device ID now.
* Updated standalone mode.
  * Added e2ee and web server support.
  * It's now officially supported and [somewhat documented](https://docs.mau.fi/maubot/usage/standalone.html).
* Replaced `_` with `-` when generating command name from function name.
* Replaced unmaintained PyInquirer dependency with questionary
  (thanks to [@TinfoilSubmarine] in [#139]).
* Updated Docker image to Alpine 3.14.
* Fixed avatar URLs without the `mxc://` prefix appearing like they work in the
  frontend, but not actually working when saved.

[@TinfoilSubmarine]: https://github.com/TinfoilSubmarine
[#139]: https://github.com/maubot/maubot/pull/139

# v0.1.2 (2021-06-12)

* Added `loader` instance property for plugins to allow reading files within
  the plugin archive.
* Added support for reloading `webapp` and `database` meta flags in plugins.
  Previously you had to restart maubot instead of just reloading the plugin
  when enabling the webapp or database for the first time.
* Added warning log if a plugin uses `@web` decorators without enabling the
  `webapp` meta flag.
* Updated frontend to latest React and dependency versions.
* Updated Docker image to Alpine 3.13.
* Fixed registering accounts with Synapse shared secret registration.
* Fixed plugins using `get_event` in encrypted rooms.
* Fixed using the `@command.new` decorator without specifying a name
  (i.e. falling back to the function name).

# v0.1.1 (2021-05-02)

No changelog.

# v0.1.0 (2020-10-04)

Initial tagged release.
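The `in_thread` flag from the v0.4.1 notes and the `evt.redact()` shortcut from v0.4.0 can be exercised from a plugin roughly as follows. This is a minimal sketch based only on the changelog entries above; the plugin class and command names are illustrative, not part of the repository.

```python
from maubot import Plugin, MessageEvent
from maubot.handlers import command


class ThreadDemoBot(Plugin):
    @command.new("threadping")
    async def threadping(self, evt: MessageEvent) -> None:
        # Default behaviour (v0.4.1): the response follows the command's thread, if any.
        await evt.respond("pong")
        # Passing the flag explicitly forces the reply out of (or into) a thread.
        await evt.reply("pong outside the thread", in_thread=False)

    @command.new("redactme")
    async def redactme(self, evt: MessageEvent) -> None:
        # v0.4.0 shortcut for self.client.redact(evt.room_id, evt.event_id).
        await evt.redact()
```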
Dockerfile (59 changes)
@@ -1,37 +1,60 @@
-FROM node:12 AS frontend-builder
+FROM node:20 AS frontend-builder
 
 COPY ./maubot/management/frontend /frontend
 RUN cd /frontend && yarn --prod && yarn build
 
-FROM alpine:3.10
+FROM alpine:3.20
 
-ENV UID=1337 \
-    GID=1337
-
-COPY . /opt/maubot
-COPY --from=frontend-builder /frontend/build /opt/maubot/frontend
-WORKDIR /opt/maubot
 RUN apk add --no-cache \
+      python3 py3-pip py3-setuptools py3-wheel \
+      ca-certificates \
+      su-exec \
+      yq \
       py3-aiohttp \
-      py3-sqlalchemy \
       py3-attrs \
       py3-bcrypt \
       py3-cffi \
-      build-base \
-      python3-dev \
-      ca-certificates \
-      su-exec \
-      py3-pillow \
-      py3-magic \
-      py3-psycopg2 \
       py3-ruamel.yaml \
       py3-jinja2 \
       py3-click \
       py3-packaging \
       py3-markdown \
- && pip3 install -r requirements.txt feedparser dateparser langdetect
-# TODO remove pillow, magic and feedparser when maubot supports installing dependencies
+      py3-alembic \
+      py3-cssselect \
+      py3-commonmark \
+      py3-pygments \
+      py3-tz \
+      py3-regex \
+      py3-wcwidth \
+      # encryption
+      py3-cffi \
+      py3-olm \
+      py3-pycryptodome \
+      py3-unpaddedbase64 \
+      py3-future \
+      # plugin deps
+      py3-pillow \
+      py3-magic \
+      py3-feedparser \
+      py3-dateutil \
+      py3-lxml \
+      py3-semver
+# TODO remove pillow, magic, feedparser, lxml, gitlab and semver when maubot supports installing dependencies
+
+COPY requirements.txt /opt/maubot/requirements.txt
+COPY optional-requirements.txt /opt/maubot/optional-requirements.txt
+WORKDIR /opt/maubot
+RUN apk add --virtual .build-deps python3-dev build-base git \
+ && pip3 install --break-system-packages -r requirements.txt -r optional-requirements.txt \
+      dateparser langdetect python-gitlab pyquery tzlocal \
+ && apk del .build-deps
+# TODO also remove dateparser, langdetect and pyquery when maubot supports installing dependencies
+
+COPY . /opt/maubot
+RUN cp maubot/example-config.yaml .
+COPY ./docker/mbc.sh /usr/local/bin/mbc
+COPY --from=frontend-builder /frontend/build /opt/maubot/frontend
+ENV UID=1337 GID=1337 XDG_CONFIG_HOME=/data
 VOLUME /data
 
 CMD ["/opt/maubot/docker/run.sh"]
Dockerfile.ci (new file, +55)
@@ -0,0 +1,55 @@
FROM alpine:3.20

RUN apk add --no-cache \
      python3 py3-pip py3-setuptools py3-wheel \
      ca-certificates \
      su-exec \
      yq \
      py3-aiohttp \
      py3-attrs \
      py3-bcrypt \
      py3-cffi \
      py3-ruamel.yaml \
      py3-jinja2 \
      py3-click \
      py3-packaging \
      py3-markdown \
      py3-alembic \
      # py3-cssselect \
      py3-commonmark \
      py3-pygments \
      py3-tz \
      # py3-tzlocal \
      py3-regex \
      py3-wcwidth \
      # encryption
      py3-cffi \
      py3-olm \
      py3-pycryptodome \
      py3-unpaddedbase64 \
      py3-future \
      # plugin deps
      py3-pillow \
      py3-magic \
      py3-feedparser \
      py3-lxml
      # py3-gitlab
      # py3-semver
# TODO remove pillow, magic, feedparser, lxml, gitlab and semver when maubot supports installing dependencies

COPY requirements.txt /opt/maubot/requirements.txt
COPY optional-requirements.txt /opt/maubot/optional-requirements.txt
WORKDIR /opt/maubot
RUN apk add --virtual .build-deps python3-dev build-base git \
 && pip3 install --break-system-packages -r requirements.txt -r optional-requirements.txt \
      dateparser langdetect python-gitlab pyquery semver tzlocal cssselect \
 && apk del .build-deps
# TODO also remove dateparser, langdetect and pyquery when maubot supports installing dependencies

COPY . /opt/maubot
RUN cp /opt/maubot/maubot/example-config.yaml /opt/maubot
COPY ./docker/mbc.sh /usr/local/bin/mbc
ENV UID=1337 GID=1337 XDG_CONFIG_HOME=/data
VOLUME /data

CMD ["/opt/maubot/docker/run.sh"]
MANIFEST.in (new file, +5)
@@ -0,0 +1,5 @@
include README.md
include CHANGELOG.md
include LICENSE
include requirements.txt
include optional-requirements.txt
README.md (44 changes)
@@ -1,37 +1,29 @@
 # maubot
+![Languages](https://img.shields.io/github/languages/top/maubot/maubot.svg)
+[![License](https://img.shields.io/github/license/maubot/maubot.svg)](LICENSE)
+[![Release](https://img.shields.io/github/release/maubot/maubot/all.svg)](https://github.com/maubot/maubot/releases)
+[![GitLab CI](https://mau.dev/maubot/maubot/badges/master/pipeline.svg)](https://mau.dev/maubot/maubot/container_registry)
+[![Code style](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black)
+[![Imports](https://img.shields.io/badge/%20imports-isort-%231674b1?style=flat&labelColor=ef8336)](https://pycqa.github.io/isort/)
+
 A plugin-based [Matrix](https://matrix.org) bot system written in Python.
 
-### [Wiki](https://github.com/maubot/maubot/wiki)
+## Documentation
 
-### [Management API spec](https://github.com/maubot/maubot/blob/master/maubot/management/api/spec.md)
+All setup and usage instructions are located on
+[docs.mau.fi](https://docs.mau.fi/maubot/index.html). Some quick links:
+
+* [Setup](https://docs.mau.fi/maubot/usage/setup/index.html)
+  (or [with Docker](https://docs.mau.fi/maubot/usage/setup/docker.html))
+* [Basic usage](https://docs.mau.fi/maubot/usage/basic.html)
+* [Encryption](https://docs.mau.fi/maubot/usage/encryption.html)
+
 ## Discussion
 Matrix room: [#maubot:maunium.net](https://matrix.to/#/#maubot:maunium.net)
 
 ## Plugins
-* [jesaribot](https://github.com/maubot/jesaribot) - A simple bot that replies with an image when you say "jesari".
-* [sed](https://github.com/maubot/sed) - A bot to do sed-like replacements.
-* [factorial](https://github.com/maubot/factorial) - A bot to calculate unexpected factorials.
-* [media](https://github.com/maubot/media) - A bot that replies with the MXC URI of images you send it.
-* [dice](https://github.com/maubot/dice) - A combined dice rolling and calculator bot.
-* [karma](https://github.com/maubot/karma) - A user karma tracker bot.
-* [xkcd](https://github.com/maubot/xkcd) - A bot to view xkcd comics.
-* [echo](https://github.com/maubot/echo) - A bot that echoes pings and other stuff.
-* [rss](https://github.com/maubot/rss) - A bot that posts RSS feed updates to Matrix.
-* [reddit](https://github.com/TomCasavant/RedditMaubot) - A bot that condescendingly corrects a user when they enter an r/subreddit without providing a link to that subreddit
-* [giphy](https://github.com/TomCasavant/GiphyMaubot) - A bot that generates a gif (from giphy) given search terms
-* [trump](https://github.com/jeffcasavant/MaubotTrumpTweet) - A bot that generates a Trump tweet with the given content
-* [poll](https://github.com/TomCasavant/PollMaubot) - A bot that will create a simple poll for users in a room
-* [urban](https://github.com/dvdgsng/UrbanMaubot) - A bot that fetches definitions from [Urban Dictionary](https://www.urbandictionary.com/).
-* [reminder](https://github.com/maubot/reminder) - A bot to remind you about things.
-* [translate](https://github.com/maubot/translate) - A bot to translate words.
-* [reactbot](https://github.com/maubot/reactbot) - A bot that responds to messages that match predefined rules.
-* [exec](https://github.com/maubot/exec) - A bot that executes code.
-* [commitstrip](https://github.com/maubot/commitstrip) - A bot to view CommitStrips.
-* [supportportal](https://github.com/maubot/supportportal) - A bot to manage customer support on Matrix.
-* [gitlab](https://github.com/maubot/gitlab) - A GitLab client and webhook receiver.
-* [github](https://github.com/maubot/github) - A GitHub client and webhook receiver.
+A list of plugins can be found at [plugins.mau.bot](https://plugins.mau.bot/).
 
-Open a pull request or join the Matrix room linked above to get your plugin listed here
+To add your plugin to the list, send a pull request to <https://github.com/maubot/plugins.maubot.xyz>.
 
-The plugin wishlist lives at https://github.com/maubot/plugin-wishlist/issues
+The plugin wishlist lives at <https://github.com/maubot/plugin-wishlist/issues>.
alembic.ini (deleted, -83)
@@ -1,83 +0,0 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration files
# file_template = %%(rev)s_%%(slug)s

# timezone to use when rendering the date
# within the migration file as well as the filename.
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; this defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path
# version_locations = %(here)s/bar %(here)s/bat alembic/versions

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks=black
# black.type=console_scripts
# black.entrypoint=black
# black.options=-l 79

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
@@ -1 +0,0 @@
Generic single-database configuration.
@@ -1,90 +0,0 @@
from logging.config import fileConfig

from sqlalchemy import engine_from_config, pool

from alembic import context

import sys
from os.path import abspath, dirname

sys.path.insert(0, dirname(dirname(abspath(__file__))))

from mautrix.util.db import Base
from maubot.config import Config
from maubot import db

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

maubot_config_path = context.get_x_argument(as_dictionary=True).get("config", "config.yaml")
maubot_config = Config(maubot_config_path, None)
maubot_config.load()
config.set_main_option("sqlalchemy.url", maubot_config["database"].replace("%", "%%"))

# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)

# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = Base.metadata

# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.


def run_migrations_offline():
    """Run migrations in 'offline' mode.

    This configures the context with just a URL
    and not an Engine, though an Engine is acceptable
    here as well. By skipping the Engine creation
    we don't even need a DBAPI to be available.

    Calls to context.execute() here emit the given string to the
    script output.

    """
    url = config.get_main_option("sqlalchemy.url")
    context.configure(
        url=url,
        target_metadata=target_metadata,
        literal_binds=True,
        dialect_opts={"paramstyle": "named"},
    )

    with context.begin_transaction():
        context.run_migrations()


def run_migrations_online():
    """Run migrations in 'online' mode.

    In this scenario we need to create an Engine
    and associate a connection with the context.

    """
    connectable = engine_from_config(
        config.get_section(config.config_ini_section),
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )

    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )

        with context.begin_transaction():
            context.run_migrations()


if context.is_offline_mode():
    run_migrations_offline()
else:
    run_migrations_online()
@@ -1,24 +0,0 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade():
    ${upgrades if upgrades else "pass"}


def downgrade():
    ${downgrades if downgrades else "pass"}
@@ -1,40 +0,0 @@
"""Let plugins have multiple files

Revision ID: 6b66c1600d16
Revises: d295f8dcfa64
Create Date: 2020-01-02 01:30:51.622962

"""
from alembic import op
import sqlalchemy as sa

# revision identifiers, used by Alembic.
revision = "6b66c1600d16"
down_revision = "d295f8dcfa64"
branch_labels = None
depends_on = None


def upgrade():
    plugin_file: sa.Table = op.create_table(
        "plugin_file",
        sa.Column("plugin_id", sa.String(length=255), nullable=False),
        sa.Column("file_name", sa.String(length=255), nullable=False),
        sa.Column("content", sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(("plugin_id",), ["plugin.id"], onupdate="CASCADE",
                                ondelete="CASCADE"),
        sa.PrimaryKeyConstraint("plugin_id", "file_name"))

    conn: sa.engine.Connection = op.get_bind()
    conn.execute(plugin_file.insert().values([{
        "plugin_id": plugin_id,
        "file_name": "config.yaml",
        "content": config
    } for plugin_id, config in conn.execute("SELECT id, config FROM plugin").fetchall()]))

    op.drop_column("plugin", "config")


def downgrade():
    op.add_column("plugin", sa.Column("config", sa.TEXT(), autoincrement=False, nullable=False))
    op.drop_table("plugin_file")
@@ -1,50 +0,0 @@
"""Initial revision

Revision ID: d295f8dcfa64
Revises:
Create Date: 2019-09-27 00:21:02.527915

"""
from alembic import op
import sqlalchemy as sa


# revision identifiers, used by Alembic.
revision = 'd295f8dcfa64'
down_revision = None
branch_labels = None
depends_on = None


def upgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('client',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('homeserver', sa.String(length=255), nullable=False),
        sa.Column('access_token', sa.Text(), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False),
        sa.Column('next_batch', sa.String(length=255), nullable=False),
        sa.Column('filter_id', sa.String(length=255), nullable=False),
        sa.Column('sync', sa.Boolean(), nullable=False),
        sa.Column('autojoin', sa.Boolean(), nullable=False),
        sa.Column('displayname', sa.String(length=255), nullable=False),
        sa.Column('avatar_url', sa.String(length=255), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('plugin',
        sa.Column('id', sa.String(length=255), nullable=False),
        sa.Column('type', sa.String(length=255), nullable=False),
        sa.Column('enabled', sa.Boolean(), nullable=False),
        sa.Column('primary_user', sa.String(length=255), nullable=False),
        sa.Column('config', sa.Text(), nullable=False),
        sa.ForeignKeyConstraint(['primary_user'], ['client.id'], onupdate='CASCADE', ondelete='RESTRICT'),
        sa.PrimaryKeyConstraint('id')
    )
    # ### end Alembic commands ###


def downgrade():
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('plugin')
    op.drop_table('client')
    # ### end Alembic commands ###
dev-requirements.txt (new file, +3)
@@ -0,0 +1,3 @@
pre-commit>=2.10.1,<3
isort>=5.10.1,<6
black>=24,<25
@@ -1,96 +0,0 @@
# The full URI to the database. SQLite and Postgres are fully supported.
# Other DBMSes supported by SQLAlchemy may or may not work.
# Format examples:
#   SQLite:   sqlite:///filename.db
#   Postgres: postgres://username:password@hostname/dbname
database: sqlite:////data/maubot.db

plugin_directories:
    # The directory where uploaded new plugins should be stored.
    upload: /data/plugins
    # The directories from which plugins should be loaded.
    # Duplicate plugin IDs will be moved to the trash.
    load:
    - /data/plugins
    # The directory where old plugin versions and conflicting plugins should be moved.
    # Set to "delete" to delete files immediately.
    trash: /data/trash
    # The directory where plugin databases should be stored.
    db: /data/plugins

server:
    # The IP and port to listen to.
    hostname: 0.0.0.0
    port: 29316
    # Public base URL where the server is visible.
    public_url: https://example.com
    # The base management API path.
    base_path: /_matrix/maubot/v1
    # The base path for the UI.
    ui_base_path: /_matrix/maubot
    # The base path for plugin endpoints. The instance ID will be appended directly.
    plugin_base_path: /_matrix/maubot/plugin/
    # Override path from where to load UI resources.
    # Set to false to using pkg_resources to find the path.
    override_resource_path: /opt/maubot/frontend
    # The base appservice API path. Use / for legacy appservice API and /_matrix/app/v1 for v1.
    appservice_base_path: /_matrix/app/v1
    # The shared secret to sign API access tokens.
    # Set to "generate" to generate and save a new token at startup.
    unshared_secret: generate

# Shared registration secrets to allow registering new users from the management UI
registration_secrets:
    example.com:
        # Client-server API URL
        url: https://example.com
        # registration_shared_secret from synapse config
        secret: synapse_shared_registration_secret

# List of administrator users. Plaintext passwords will be bcrypted on startup. Set empty password
# to prevent normal login. Root is a special user that can't have a password and will always exist.
admins:
    root: ""

# API feature switches.
api_features:
    login: true
    plugin: true
    plugin_upload: true
    instance: true
    instance_database: true
    client: true
    client_proxy: true
    client_auth: true
    dev_open: true
    log: true

# Python logging configuration.
#
# See section 16.7.2 of the Python documentation for more info:
# https://docs.python.org/3.6/library/logging.config.html#configuration-dictionary-schema
logging:
    version: 1
    formatters:
        precise:
            format: "[%(asctime)s] [%(levelname)s@%(name)s] %(message)s"
    handlers:
        file:
            class: logging.handlers.RotatingFileHandler
            formatter: precise
            filename: /var/log/maubot.log
            maxBytes: 10485760
            backupCount: 10
        console:
            class: logging.StreamHandler
            formatter: precise
    loggers:
        maubot:
            level: DEBUG
        mautrix:
            level: DEBUG
        aiohttp:
            level: INFO
    root:
        level: DEBUG
        handlers: [file, console]
docker/mbc.sh (new executable file, +3)
@@ -0,0 +1,3 @@
#!/bin/sh
export PYTHONPATH=/opt/maubot
python3 -m maubot.cli "$@"
@@ -1,21 +1,46 @@
 #!/bin/sh
 
 function fixperms {
-	chown -R $UID:$GID /var/log /data /opt/maubot
+	chown -R $UID:$GID /var/log /data
+}
+
+function fixdefault {
+	_value=$(yq e "$1" /data/config.yaml)
+	if [[ "$_value" == "$2" ]]; then
+		yq e -i "$1 = "'"'"$3"'"' /data/config.yaml
+	fi
+}
+
+function fixconfig {
+	# Change relative default paths to absolute paths in /data
+	fixdefault '.database' 'sqlite:maubot.db' 'sqlite:/data/maubot.db'
+	fixdefault '.plugin_directories.upload' './plugins' '/data/plugins'
+	fixdefault '.plugin_directories.load[0]' './plugins' '/data/plugins'
+	fixdefault '.plugin_directories.trash' './trash' '/data/trash'
+	fixdefault '.plugin_databases.sqlite' './plugins' '/data/dbs'
+	fixdefault '.plugin_databases.sqlite' './dbs' '/data/dbs'
+	fixdefault '.logging.handlers.file.filename' './maubot.log' '/var/log/maubot.log'
+	# This doesn't need to be configurable
+	yq e -i '.server.override_resource_path = "/opt/maubot/frontend"' /data/config.yaml
 }
 
 cd /opt/maubot
 
+mkdir -p /var/log/maubot /data/plugins /data/trash /data/dbs
+
 if [ ! -f /data/config.yaml ]; then
-	cp docker/example-config.yaml /data/config.yaml
-	mkdir -p /var/log /data/plugins /data/trash /data/dbs
+	cp example-config.yaml /data/config.yaml
 	echo "Config file not found. Example config copied to /data/config.yaml"
 	echo "Please modify the config file to your liking and restart the container."
 	fixperms
+	fixconfig
 	exit
 fi
 
-mkdir -p /var/log/maubot /data/plugins /data/trash /data/dbs
-alembic -x config=/data/config.yaml upgrade head
 fixperms
-exec su-exec $UID:$GID python3 -m maubot -c /data/config.yaml -b docker/example-config.yaml
+fixconfig
+if ls /data/plugins/*.db > /dev/null 2>&1; then
+	mv -n /data/plugins/*.db /data/dbs/
+fi
+
+exec su-exec $UID:$GID python3 -m maubot -c /data/config.yaml
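For reference, the yq-based `fixdefault`/`fixconfig` logic in the new run.sh boils down to "rewrite config values that are still at their relative defaults to absolute paths under /data". A condensed Python sketch of the same idea, using ruamel.yaml (which the image ships); this helper is an illustration only and is not part of the repository, and it covers only a subset of the `fixdefault` calls above:

```python
from ruamel.yaml import YAML

yaml = YAML()

# (key path, old default, replacement) triples mirroring several fixdefault calls in run.sh.
DEFAULTS = [
    (("database",), "sqlite:maubot.db", "sqlite:/data/maubot.db"),
    (("plugin_directories", "upload"), "./plugins", "/data/plugins"),
    (("plugin_directories", "trash"), "./trash", "/data/trash"),
    (("plugin_databases", "sqlite"), "./dbs", "/data/dbs"),
    (("logging", "handlers", "file", "filename"), "./maubot.log", "/var/log/maubot.log"),
]


def fix_config(path: str = "/data/config.yaml") -> None:
    with open(path) as f:
        cfg = yaml.load(f)
    for keys, default, replacement in DEFAULTS:
        node = cfg
        for key in keys[:-1]:
            node = node[key]
        # Only touch values that are still at their relative defaults,
        # exactly like the yq version does.
        if node[keys[-1]] == default:
            node[keys[-1]] = replacement
    # override_resource_path is always forced, matching the last yq call.
    cfg["server"]["override_resource_path"] = "/opt/maubot/frontend"
    with open(path, "w") as f:
        yaml.dump(cfg, f)
```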
@@ -1,6 +1,6 @@
 The MIT License (MIT)
 
-Copyright (c) 2018 Tulir Asokan
+Copyright (c) 2022 Tulir Asokan
 
 Permission is hereby granted, free of charge, to any person obtaining a copy of
 this software and associated documentation files (the "Software"), to deal in
|
@ -4,3 +4,4 @@ All examples are published under the [MIT license](LICENSE).
|
|||||||
* [Hello World](helloworld/) - Very basic event handling bot that responds "Hello, World!" to all messages.
|
* [Hello World](helloworld/) - Very basic event handling bot that responds "Hello, World!" to all messages.
|
||||||
* [Echo bot](https://github.com/maubot/echo) - Basic command handling bot with !echo and !ping commands
|
* [Echo bot](https://github.com/maubot/echo) - Basic command handling bot with !echo and !ping commands
|
||||||
* [Config example](config/) - Simple example of using a config file
|
* [Config example](config/) - Simple example of using a config file
|
||||||
|
* [Database example](database/) - Simple example of using a database
|
||||||
|
@@ -1,2 +1,5 @@
-# Message to send when user sends !getmessage
-message: Default configuration active
+# Who is allowed to use the bot?
+whitelist:
+- "@user:example.com"
+# The prefix for the main command without the !
+command_prefix: hello-world
@@ -1,5 +1,4 @@
 from typing import Type
-
 from mautrix.util.config import BaseProxyConfig, ConfigUpdateHelper
 from maubot import Plugin, MessageEvent
 from maubot.handlers import command
@@ -7,19 +6,22 @@ from maubot.handlers import command
 
 class Config(BaseProxyConfig):
     def do_update(self, helper: ConfigUpdateHelper) -> None:
-        helper.copy("message")
+        helper.copy("whitelist")
+        helper.copy("command_prefix")
 
 
-class DatabaseBot(Plugin):
+class ConfigurableBot(Plugin):
     async def start(self) -> None:
-        await super().start()
         self.config.load_and_update()
 
+    def get_command_name(self) -> str:
+        return self.config["command_prefix"]
+
+    @command.new(name=get_command_name)
+    async def hmm(self, evt: MessageEvent) -> None:
+        if evt.sender in self.config["whitelist"]:
+            await evt.reply("You're whitelisted 🎉")
+
     @classmethod
     def get_config_class(cls) -> Type[BaseProxyConfig]:
         return Config
-
-    @command.new("getmessage")
-    async def handler(self, event: MessageEvent) -> None:
-        if event.sender != self.client.mxid:
-            await event.reply(self.config["message"])
@@ -1,11 +1,12 @@
 maubot: 0.1.0
-id: xyz.maubot.databasebot
+id: xyz.maubot.configurablebot
-version: 1.0.0
+version: 2.0.0
 license: MIT
 modules:
 - configurablebot
 main_class: ConfigurableBot
 database: false
+config: true
 
 # Instruct the build tool to include the base config.
 extra_files:
examples/database/maubot.yaml (new file, +10)
@@ -0,0 +1,10 @@
maubot: 0.1.0
id: xyz.maubot.storagebot
version: 2.0.0
license: MIT
modules:
- storagebot
main_class: StorageBot
database: true
database_type: asyncpg
config: false
examples/database/storagebot.py (new file, 72 lines)
@@ -0,0 +1,72 @@
+from __future__ import annotations
+
+from mautrix.util.async_db import UpgradeTable, Connection
+from maubot import Plugin, MessageEvent
+from maubot.handlers import command
+
+upgrade_table = UpgradeTable()
+
+
+@upgrade_table.register(description="Initial revision")
+async def upgrade_v1(conn: Connection) -> None:
+    await conn.execute(
+        """CREATE TABLE stored_data (
+            key   TEXT PRIMARY KEY,
+            value TEXT NOT NULL
+        )"""
+    )
+
+
+@upgrade_table.register(description="Remember user who added value")
+async def upgrade_v2(conn: Connection) -> None:
+    await conn.execute("ALTER TABLE stored_data ADD COLUMN creator TEXT")
+
+
+class StorageBot(Plugin):
+    @command.new()
+    async def storage(self, evt: MessageEvent) -> None:
+        pass
+
+    @storage.subcommand(help="Store a value")
+    @command.argument("key")
+    @command.argument("value", pass_raw=True)
+    async def put(self, evt: MessageEvent, key: str, value: str) -> None:
+        q = """
+            INSERT INTO stored_data (key, value, creator) VALUES ($1, $2, $3)
+            ON CONFLICT (key) DO UPDATE SET value=excluded.value, creator=excluded.creator
+        """
+        await self.database.execute(q, key, value, evt.sender)
+        await evt.reply(f"Inserted {key} into the database")
+
+    @storage.subcommand(help="Get a value from the storage")
+    @command.argument("key")
+    async def get(self, evt: MessageEvent, key: str) -> None:
+        q = "SELECT key, value, creator FROM stored_data WHERE LOWER(key)=LOWER($1)"
+        row = await self.database.fetchrow(q, key)
+        if row:
+            key = row["key"]
+            value = row["value"]
+            creator = row["creator"]
+            await evt.reply(f"`{key}` stored by {creator}:\n\n```\n{value}\n```")
+        else:
+            await evt.reply(f"No data stored under `{key}` :(")
+
+    @storage.subcommand(help="List keys in the storage")
+    @command.argument("prefix", required=False)
+    async def list(self, evt: MessageEvent, prefix: str | None) -> None:
+        q = "SELECT key, creator FROM stored_data WHERE key LIKE $1"
+        rows = await self.database.fetch(q, prefix + "%")
+        prefix_reply = f" starting with `{prefix}`" if prefix else ""
+        if len(rows) == 0:
+            await evt.reply(f"Nothing{prefix_reply} stored in database :(")
+        else:
+            formatted_data = "\n".join(
+                f"* `{row['key']}` stored by {row['creator']}" for row in rows
+            )
+            await evt.reply(
+                f"Found {len(rows)} keys{prefix_reply} in database:\n\n{formatted_data}"
+            )
+
+    @classmethod
+    def get_db_upgrade_table(cls) -> UpgradeTable | None:
+        return upgrade_table
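The subcommand pattern above generalizes: each handler is a decorated coroutine that talks to `self.database` with the same asyncpg-style calls. As a hedged illustration (not part of the upstream example), a `delete` subcommand could be added inside the `StorageBot` class with nothing beyond the APIs already shown in the diff:

    # Hypothetical addition to the StorageBot class above; assumes the same imports
    # and the stored_data table created by upgrade_v1/upgrade_v2.
    @storage.subcommand(help="Delete a value from the storage")
    @command.argument("key")
    async def delete(self, evt: MessageEvent, key: str) -> None:
        q = "DELETE FROM stored_data WHERE LOWER(key)=LOWER($1)"
        await self.database.execute(q, key)
        await evt.reply(f"Deleted `{key}` (if it was stored)")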
@@ -1,3 +1,4 @@
+from .__meta__ import __version__
+from .matrix import MaubotMatrixClient as Client, MaubotMessageEvent as MessageEvent
 from .plugin_base import Plugin
 from .plugin_server import PluginWebApp
-from .matrix import MaubotMatrixClient as Client, MaubotMessageEvent as MessageEvent
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,82 +13,171 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-import logging.config
-import argparse
+from __future__ import annotations
+
 import asyncio
-import signal
-import copy
 import sys

-from .config import Config
-from .db import init as init_db
-from .server import MaubotServer
-from .client import Client, init as init_client_class
-from .loader.zip import init as init_zip_loader
-from .instance import init as init_plugin_instance_class
-from .management.api import init as init_mgmt_api
+from mautrix.util.async_db import Database, DatabaseException, PostgresDatabase, Scheme
+from mautrix.util.program import Program
+
 from .__meta__ import __version__
-
-parser = argparse.ArgumentParser(description="A plugin-based Matrix bot system.",
-                                 prog="python -m maubot")
-parser.add_argument("-c", "--config", type=str, default="config.yaml",
-                    metavar="<path>", help="the path to your config file")
-parser.add_argument("-b", "--base-config", type=str, default="example-config.yaml",
-                    metavar="<path>", help="the path to the example config "
-                                           "(for automatic config updates)")
-args = parser.parse_args()
-
-config = Config(args.config, args.base_config)
-config.load()
-config.update()
-
-logging.config.dictConfig(copy.deepcopy(config["logging"]))
-
-stop_log_listener = None
-if config["api_features.log"]:
-    from .management.api.log import init as init_log_listener, stop_all as stop_log_listener
-
-    init_log_listener()
-
-log = logging.getLogger("maubot.init")
-log.info(f"Initializing maubot {__version__}")
-
-loop = asyncio.get_event_loop()
-
-init_zip_loader(config)
-db_engine = init_db(config)
-clients = init_client_class(loop)
-management_api = init_mgmt_api(config, loop)
-server = MaubotServer(management_api, config, loop)
-plugins = init_plugin_instance_class(config, server, loop)
-
-for plugin in plugins:
-    plugin.load()
-
-signal.signal(signal.SIGINT, signal.default_int_handler)
-signal.signal(signal.SIGTERM, signal.default_int_handler)
-
-try:
-    log.info("Starting server")
-    loop.run_until_complete(server.start())
-    log.info("Starting clients and plugins")
-    loop.run_until_complete(asyncio.gather(*[client.start() for client in clients], loop=loop))
-    log.info("Startup actions complete, running forever")
-    loop.run_forever()
-except KeyboardInterrupt:
-    log.info("Interrupt received, stopping clients")
-    loop.run_until_complete(asyncio.gather(*[client.stop() for client in Client.cache.values()],
-                                           loop=loop))
-    if stop_log_listener is not None:
-        log.debug("Closing websockets")
-        loop.run_until_complete(stop_log_listener())
-    log.debug("Stopping server")
-    try:
-        loop.run_until_complete(asyncio.wait_for(server.stop(), 5, loop=loop))
-    except asyncio.TimeoutError:
-        log.warning("Stopping server timed out")
-    log.debug("Closing event loop")
-    loop.close()
-    log.debug("Everything stopped, shutting down")
-    sys.exit(0)
+from .client import Client
+from .config import Config
+from .db import init as init_db, upgrade_table
+from .instance import PluginInstance
+from .lib.future_awaitable import FutureAwaitable
+from .lib.state_store import PgStateStore
+from .loader.zip import init as init_zip_loader
+from .management.api import init as init_mgmt_api
+from .server import MaubotServer
+
+try:
+    from mautrix.crypto.store import PgCryptoStore
+except ImportError:
+    PgCryptoStore = None
+
+
+class Maubot(Program):
+    config: Config
+    server: MaubotServer
+    db: Database
+    crypto_db: Database | None
+    plugin_postgres_db: PostgresDatabase | None
+    state_store: PgStateStore
+
+    config_class = Config
+    module = "maubot"
+    name = "maubot"
+    version = __version__
+    command = "python -m maubot"
+    description = "A plugin-based Matrix bot system."
+
+    def prepare_log_websocket(self) -> None:
+        from .management.api.log import init, stop_all
+
+        init(self.loop)
+        self.add_shutdown_actions(FutureAwaitable(stop_all))
+
+    def prepare_arg_parser(self) -> None:
+        super().prepare_arg_parser()
+        self.parser.add_argument(
+            "--ignore-unsupported-database",
+            action="store_true",
+            help="Run even if the database schema is too new",
+        )
+        self.parser.add_argument(
+            "--ignore-foreign-tables",
+            action="store_true",
+            help="Run even if the database contains tables from other programs (like Synapse)",
+        )
+
+    def prepare_db(self) -> None:
+        self.db = Database.create(
+            self.config["database"],
+            upgrade_table=upgrade_table,
+            db_args=self.config["database_opts"],
+            owner_name=self.name,
+            ignore_foreign_tables=self.args.ignore_foreign_tables,
+        )
+        init_db(self.db)
+
+        if PgCryptoStore:
+            if self.config["crypto_database"] == "default":
+                self.crypto_db = self.db
+            else:
+                self.crypto_db = Database.create(
+                    self.config["crypto_database"],
+                    upgrade_table=PgCryptoStore.upgrade_table,
+                    ignore_foreign_tables=self.args.ignore_foreign_tables,
+                )
+        else:
+            self.crypto_db = None
+
+        if self.config["plugin_databases.postgres"] == "default":
+            if self.db.scheme != Scheme.POSTGRES:
+                self.log.critical(
+                    'Using "default" as the postgres plugin database URL is only allowed if '
+                    "the default database is postgres."
+                )
+                sys.exit(24)
+            assert isinstance(self.db, PostgresDatabase)
+            self.plugin_postgres_db = self.db
+        elif self.config["plugin_databases.postgres"]:
+            plugin_db = Database.create(
+                self.config["plugin_databases.postgres"],
+                db_args={
+                    **self.config["database_opts"],
+                    **self.config["plugin_databases.postgres_opts"],
+                },
+            )
+            if plugin_db.scheme != Scheme.POSTGRES:
+                self.log.critical("The plugin postgres database URL must be a postgres database")
+                sys.exit(24)
+            assert isinstance(plugin_db, PostgresDatabase)
+            self.plugin_postgres_db = plugin_db
+        else:
+            self.plugin_postgres_db = None
+
+    def prepare(self) -> None:
+        super().prepare()
+
+        if self.config["api_features.log"]:
+            self.prepare_log_websocket()
+
+        init_zip_loader(self.config)
+        self.prepare_db()
+        Client.init_cls(self)
+        PluginInstance.init_cls(self)
+        management_api = init_mgmt_api(self.config, self.loop)
+        self.server = MaubotServer(management_api, self.config, self.loop)
+        self.state_store = PgStateStore(self.db)
+
+    async def start_db(self) -> None:
+        self.log.debug("Starting database...")
+        ignore_unsupported = self.args.ignore_unsupported_database
+        self.db.upgrade_table.allow_unsupported = ignore_unsupported
+        self.state_store.upgrade_table.allow_unsupported = ignore_unsupported
+        try:
+            await self.db.start()
+            await self.state_store.upgrade_table.upgrade(self.db)
+            if self.plugin_postgres_db and self.plugin_postgres_db is not self.db:
+                await self.plugin_postgres_db.start()
+            if self.crypto_db:
+                PgCryptoStore.upgrade_table.allow_unsupported = ignore_unsupported
+                if self.crypto_db is not self.db:
+                    await self.crypto_db.start()
+                else:
+                    await PgCryptoStore.upgrade_table.upgrade(self.db)
+        except DatabaseException as e:
+            self.log.critical("Failed to initialize database", exc_info=e)
+            if e.explanation:
+                self.log.info(e.explanation)
+            sys.exit(25)
+
+    async def system_exit(self) -> None:
+        if hasattr(self, "db"):
+            self.log.trace("Stopping database due to SystemExit")
+            await self.db.stop()
+
+    async def start(self) -> None:
+        await self.start_db()
+        await asyncio.gather(*[plugin.load() async for plugin in PluginInstance.all()])
+        await asyncio.gather(*[client.start() async for client in Client.all()])
+        await super().start()
+        async for plugin in PluginInstance.all():
+            await plugin.load()
+        await self.server.start()
+
+    async def stop(self) -> None:
+        self.add_shutdown_actions(*(client.stop() for client in Client.cache.values()))
+        await super().stop()
+        self.log.debug("Stopping server")
+        try:
+            await asyncio.wait_for(self.server.stop(), 5)
+        except asyncio.TimeoutError:
+            self.log.warning("Stopping server timed out")
+        await self.db.stop()
+
+
+Maubot().run()
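The branching in `prepare_db` above is easier to see in isolation. Below is a rough restatement of the plugin-postgres selection as a pure function; it is illustrative only and not part of the repo (maubot's real `Config` resolves dotted keys itself, so a plain dict with literal dotted keys stands in for it here):

import sys


def pick_plugin_postgres(cfg: dict, main_db_scheme: str) -> str | None:
    # Sketch of the logic in Maubot.prepare_db above, assuming `cfg` mirrors the
    # relevant config keys. "default" reuses the main database, any other truthy
    # value is a separate postgres URL, and a falsy value disables the pool.
    value = cfg.get("plugin_databases.postgres")
    if value == "default":
        if main_db_scheme != "postgres":
            sys.exit(24)  # same exit code __main__.py uses for this misconfiguration
        return cfg["database"]
    return value or None


print(pick_plugin_postgres({"plugin_databases.postgres": None}, "sqlite"))  # -> None
print(pick_plugin_postgres(
    {"plugin_databases.postgres": "default", "database": "postgres://maubot@localhost/maubot"},
    "postgres",
))  # -> the main database URL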
@@ -1 +1 @@
-__version__ = "0.1.0.dev30"
+__version__ = "0.5.0"
@@ -1,2 +1,3 @@
 from . import app
+
 app()
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -1,2 +1,2 @@
 from .cliq import command, option
-from .validators import SPDXValidator, VersionValidator, PathValidator
+from .validators import PathValidator, SPDXValidator, VersionValidator
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,20 +13,55 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from typing import Any, Callable, Union, Optional
-import functools
+from __future__ import annotations

+from typing import Any, Callable
+import asyncio
+import functools
+import inspect
+import traceback
+
+from colorama import Fore
 from prompt_toolkit.validation import Validator
-from PyInquirer import prompt
+from questionary import prompt
+import aiohttp
 import click

 from ..base import app
-from .validators import Required, ClickValidator
+from ..config import get_token
+from .validators import ClickValidator, Required
+
+
+def with_http(func):
+    @functools.wraps(func)
+    async def wrapper(*args, **kwargs):
+        async with aiohttp.ClientSession() as sess:
+            try:
+                return await func(*args, sess=sess, **kwargs)
+            except aiohttp.ClientError as e:
+                print(f"{Fore.RED}Connection error: {e}{Fore.RESET}")
+
+    return wrapper
+
+
+def with_authenticated_http(func):
+    @functools.wraps(func)
+    async def wrapper(*args, server: str, **kwargs):
+        server, token = get_token(server)
+        if not token:
+            return
+        async with aiohttp.ClientSession(headers={"Authorization": f"Bearer {token}"}) as sess:
+            try:
+                return await func(*args, sess=sess, server=server, **kwargs)
+            except aiohttp.ClientError as e:
+                print(f"{Fore.RED}Connection error: {e}{Fore.RESET}")
+
+    return wrapper


 def command(help: str) -> Callable[[Callable], Callable]:
     def decorator(func) -> Callable:
-        questions = func.__inquirer_questions__.copy()
+        questions = getattr(func, "__inquirer_questions__", {}).copy()

         @functools.wraps(func)
         def wrapper(*args, **kwargs):
@@ -35,20 +70,43 @@ def command(help: str) -> Callable[[Callable], Callable]:
                     continue
                 if value is not None and (questions[key]["type"] != "confirm" or value != "null"):
                     questions.pop(key, None)
+                try:
+                    required_unless = questions[key].pop("required_unless")
+                    if isinstance(required_unless, str) and kwargs[required_unless]:
+                        questions.pop(key)
+                    elif isinstance(required_unless, list):
+                        for v in required_unless:
+                            if kwargs[v]:
+                                questions.pop(key)
+                                break
+                    elif isinstance(required_unless, dict):
+                        for k, v in required_unless.items():
+                            if kwargs.get(v, object()) == v:
+                                questions.pop(key)
+                                break
+                except KeyError:
+                    pass
             question_list = list(questions.values())
             question_list.reverse()
-            resp = prompt(question_list, keyboard_interrupt_msg="Aborted!")
+            resp = prompt(question_list, kbi_msg="Aborted!")
             if not resp and question_list:
                 return
             kwargs = {**kwargs, **resp}
-            func(*args, **kwargs)
+            try:
+                res = func(*args, **kwargs)
+                if inspect.isawaitable(res):
+                    asyncio.run(res)
+            except Exception:
+                print(Fore.RED + "Fatal error running command" + Fore.RESET)
+                traceback.print_exc()

         return app.command(help=help)(wrapper)

     return decorator


-def yesno(val: str) -> Optional[bool]:
+def yesno(val: str) -> bool | None:
     if not val:
         return None
     elif isinstance(val, bool):
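The new wrapper body above is what lets `mbc` register both sync and async command functions through one decorator: it calls the function, and only spins up an event loop when the result is awaitable. A standalone restatement of that dispatch (illustrative only, not from the repo):

import asyncio
import inspect


def run_maybe_async(func, *args, **kwargs):
    # Mirrors the check in cliq's wrapper above: plain functions run as-is,
    # coroutine results get driven to completion with asyncio.run().
    res = func(*args, **kwargs)
    if inspect.isawaitable(res):
        return asyncio.run(res)
    return res


async def async_hello() -> str:
    return "hello"


print(run_maybe_async(async_hello))      # -> hello
print(run_maybe_async(len, [1, 2, 3]))   # -> 3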
@@ -62,13 +120,25 @@ def yesno(val: str) -> Optional[bool]:
 yesno.__name__ = "yes/no"


-def option(short: str, long: str, message: str = None, help: str = None,
-           click_type: Union[str, Callable[[str], Any]] = None, inq_type: str = None,
-           validator: Validator = None, required: bool = False, default: str = None,
-           is_flag: bool = False, prompt: bool = True) -> Callable[[Callable], Callable]:
+def option(
+    short: str,
+    long: str,
+    message: str = None,
+    help: str = None,
+    click_type: str | Callable[[str], Any] = None,
+    inq_type: str = None,
+    validator: type[Validator] = None,
+    required: bool = False,
+    default: str | bool | None = None,
+    is_flag: bool = False,
+    prompt: bool = True,
+    required_unless: str | list | dict = None,
+) -> Callable[[Callable], Callable]:
     if not message:
         message = long[2].upper() + long[3:]
-    click_type = validator.click_type if isinstance(validator, ClickValidator) else click_type
+    if isinstance(validator, type) and issubclass(validator, ClickValidator):
+        click_type = validator.click_type
     if is_flag:
         click_type = yesno

@@ -79,18 +149,20 @@ def option(short: str, long: str, message: str = None, help: str = None,
         if not hasattr(func, "__inquirer_questions__"):
             func.__inquirer_questions__ = {}
         q = {
-            "type": (inq_type if isinstance(inq_type, str)
-                     else ("input" if not is_flag
-                           else "confirm")),
+            "type": (
+                inq_type if isinstance(inq_type, str) else ("input" if not is_flag else "confirm")
+            ),
             "name": long[2:],
             "message": message,
         }
+        if required_unless is not None:
+            q["required_unless"] = required_unless
         if default is not None:
             q["default"] = default
-        if required:
-            q["validator"] = Required(validator)
+        if required or required_unless is not None:
+            q["validate"] = Required(validator)
         elif validator:
-            q["validator"] = validator
+            q["validate"] = validator
         func.__inquirer_questions__[long[2:]] = q
         return func
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -16,9 +16,9 @@
 from typing import Callable
 import os

-from packaging.version import Version, InvalidVersion
-from prompt_toolkit.validation import Validator, ValidationError
+from packaging.version import InvalidVersion, Version
 from prompt_toolkit.document import Document
+from prompt_toolkit.validation import ValidationError, Validator
 import click

 from ..util import spdx as spdxlib
@@ -1 +1 @@
-from . import upload, build, login, init, logs, auth
+from . import auth, build, init, login, logs, upload
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,53 +13,154 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from urllib.request import urlopen, Request
-from urllib.error import HTTPError
 import json
+import webbrowser

 from colorama import Fore
+from yarl import URL
+import aiohttp
 import click

-from ..config import get_token
 from ..cliq import cliq

 history_count: int = 10

+friendly_errors = {
+    "server_not_found": (
+        "Registration target server not found.\n\n"
+        "To log in or register through maubot, you must add the server to the\n"
+        "homeservers section in the config. If you only want to log in,\n"
+        "leave the `secret` field empty."
+    ),
+    "registration_no_sso": (
+        "The register operation is only for registering with a password.\n\n"
+        "To register with SSO, simply leave out the --register flag."
+    ),
+}
+
+
+async def list_servers(server: str, sess: aiohttp.ClientSession) -> None:
+    url = URL(server) / "_matrix/maubot/v1/client/auth/servers"
+    async with sess.get(url) as resp:
+        data = await resp.json()
+        print(f"{Fore.GREEN}Available Matrix servers for registration and login:{Fore.RESET}")
+        for server in data.keys():
+            print(f"* {Fore.CYAN}{server}{Fore.RESET}")
+
+
 @cliq.command(help="Log into a Matrix account via the Maubot server")
-@cliq.option("-h", "--homeserver", help="The homeserver to log into", required=True)
-@cliq.option("-u", "--username", help="The username to log in with", required=True)
-@cliq.option("-p", "--password", help="The password to log in with", inq_type="password",
-             required=True)
-@cliq.option("-s", "--server", help="The maubot instance to log in through", default="",
-             required=False, prompt=False)
-@click.option("-r", "--register", help="Register instead of logging in", is_flag=True,
-              default=False)
-def auth(homeserver: str, username: str, password: str, server: str, register: bool) -> None:
-    server, token = get_token(server)
-    if not token:
+@cliq.option("-h", "--homeserver", help="The homeserver to log into", required_unless="list")
+@cliq.option(
+    "-u", "--username", help="The username to log in with", required_unless=["list", "sso"]
+)
+@cliq.option(
+    "-p",
+    "--password",
+    help="The password to log in with",
+    inq_type="password",
+    required_unless=["list", "sso"],
+)
+@cliq.option(
+    "-s",
+    "--server",
+    help="The maubot instance to log in through",
+    default="",
+    required=False,
+    prompt=False,
+)
+@click.option(
+    "-r", "--register", help="Register instead of logging in", is_flag=True, default=False
+)
+@click.option(
+    "-c",
+    "--update-client",
+    help="Instead of returning the access token, create or update a client in maubot using it",
+    is_flag=True,
+    default=False,
+)
+@click.option("-l", "--list", help="List available homeservers", is_flag=True, default=False)
+@click.option(
+    "-o", "--sso", help="Use single sign-on instead of password login", is_flag=True, default=False
+)
+@click.option(
+    "-n",
+    "--device-name",
+    help="The initial e2ee device displayname (only for login)",
+    default="Maubot",
+    required=False,
+)
+@cliq.with_authenticated_http
+async def auth(
+    homeserver: str,
+    username: str,
+    password: str,
+    server: str,
+    register: bool,
+    list: bool,
+    update_client: bool,
+    device_name: str,
+    sso: bool,
+    sess: aiohttp.ClientSession,
+) -> None:
+    if list:
+        await list_servers(server, sess)
         return
     endpoint = "register" if register else "login"
-    req = Request(f"{server}/_matrix/maubot/v1/client/auth/{homeserver}/{endpoint}",
-                  headers={
-                      "Authorization": f"Bearer {token}",
-                      "Content-Type": "application/json",
-                  },
-                  data=json.dumps({
-                      "username": username,
-                      "password": password,
-                  }).encode("utf-8"))
-    try:
-        with urlopen(req) as resp_data:
-            resp = json.load(resp_data)
-        action = "registered" if register else "logged in as"
-        print(f"{Fore.GREEN}Successfully {action} "
-              f"{Fore.CYAN}{resp['user_id']}{Fore.GREEN}.")
-        print(f"{Fore.GREEN}Access token: {Fore.CYAN}{resp['access_token']}{Fore.RESET}")
-    except HTTPError as e:
-        try:
-            err = json.load(e)
-        except json.JSONDecodeError:
-            err = {}
-        action = "register" if register else "log in"
-        print(f"{Fore.RED}Failed to {action}: {err.get('error', str(e))}{Fore.RESET}")
+    url = URL(server) / "_matrix/maubot/v1/client/auth" / homeserver / endpoint
+    if update_client:
+        url = url.update_query({"update_client": "true"})
+    if sso:
+        url = url.update_query({"sso": "true"})
+        req_data = {"device_name": device_name}
+    else:
+        req_data = {"username": username, "password": password, "device_name": device_name}
+
+    async with sess.post(url, json=req_data) as resp:
+        if not 200 <= resp.status < 300:
+            await print_error(resp, is_register=register)
+        elif sso:
+            await wait_sso(resp, sess, server, homeserver)
+        else:
+            await print_response(resp, is_register=register)
+
+
+async def wait_sso(
+    resp: aiohttp.ClientResponse, sess: aiohttp.ClientSession, server: str, homeserver: str
+) -> None:
+    data = await resp.json()
+    sso_url, reg_id = data["sso_url"], data["id"]
+    print(f"{Fore.GREEN}Opening {Fore.CYAN}{sso_url}{Fore.RESET}")
+    webbrowser.open(sso_url, autoraise=True)
+    print(f"{Fore.GREEN}Waiting for login token...{Fore.RESET}")
+    wait_url = URL(server) / "_matrix/maubot/v1/client/auth" / homeserver / "sso" / reg_id / "wait"
+    async with sess.post(wait_url, json={}) as resp:
+        await print_response(resp, is_register=False)
+
+
+async def print_response(resp: aiohttp.ClientResponse, is_register: bool) -> None:
+    if resp.status == 200:
+        data = await resp.json()
+        action = "registered" if is_register else "logged in as"
+        print(f"{Fore.GREEN}Successfully {action} {Fore.CYAN}{data['user_id']}{Fore.GREEN}.")
+        print(f"{Fore.GREEN}Access token: {Fore.CYAN}{data['access_token']}{Fore.RESET}")
+        print(f"{Fore.GREEN}Device ID: {Fore.CYAN}{data['device_id']}{Fore.RESET}")
+    elif resp.status in (201, 202):
+        data = await resp.json()
+        action = "created" if resp.status == 201 else "updated"
+        print(
+            f"{Fore.GREEN}Successfully {action} client for "
+            f"{Fore.CYAN}{data['id']}{Fore.GREEN} / "
+            f"{Fore.CYAN}{data['device_id']}{Fore.GREEN}.{Fore.RESET}"
+        )
+    else:
+        await print_error(resp, is_register)
+
+
+async def print_error(resp: aiohttp.ClientResponse, is_register: bool) -> None:
+    try:
+        err_data = await resp.json()
+        error = friendly_errors.get(err_data["errcode"], err_data["error"])
+    except (aiohttp.ContentTypeError, json.JSONDecodeError, KeyError):
+        error = await resp.text()
+    action = "register" if is_register else "log in"
+    print(f"{Fore.RED}Failed to {action}: {error}{Fore.RESET}")
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,21 +13,28 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from typing import Optional, Union, IO
-from io import BytesIO
-import zipfile
-import os
+from __future__ import annotations

-from mautrix.client.api.types.util import SerializerError
-from ruamel.yaml import YAML, YAMLError
+from typing import IO
+from io import BytesIO
+import asyncio
+import glob
+import os
+import zipfile
+
+from aiohttp import ClientSession
 from colorama import Fore
-from PyInquirer import prompt
+from questionary import prompt
+from ruamel.yaml import YAML, YAMLError
 import click

+from mautrix.types import SerializerError
+
 from ...loader import PluginMeta
-from ..cliq.validators import PathValidator
 from ..base import app
-from ..config import get_default_server, get_token
+from ..cliq import cliq
+from ..cliq.validators import PathValidator
+from ..config import get_token
 from .upload import upload_file

 yaml = YAML()
@@ -39,7 +46,7 @@ def zipdir(zip, dir):
             zip.write(os.path.join(root, file))


-def read_meta(path: str) -> Optional[PluginMeta]:
+def read_meta(path: str) -> PluginMeta | None:
     try:
         with open(os.path.join(path, "maubot.yaml")) as meta_file:
             try:
@@ -60,7 +67,7 @@ def read_meta(path: str) -> Optional[PluginMeta]:
     return meta


-def read_output_path(output: str, meta: PluginMeta) -> Optional[str]:
+def read_output_path(output: str, meta: PluginMeta) -> str | None:
     directory = os.getcwd()
     filename = f"{meta.id}-v{meta.version}.mbp"
     if not output:
@@ -68,18 +75,15 @@ def read_output_path(output: str, meta: PluginMeta) -> Optional[str]:
     elif os.path.isdir(output):
         output = os.path.join(output, filename)
     elif os.path.exists(output):
-        override = prompt({
-            "type": "confirm",
-            "name": "override",
-            "message": f"{output} exists, override?"
-        })["override"]
+        q = [{"type": "confirm", "name": "override", "message": f"{output} exists, override?"}]
+        override = prompt(q)["override"]
         if not override:
             return None
         os.remove(output)
     return os.path.abspath(output)


-def write_plugin(meta: PluginMeta, output: Union[str, IO]) -> None:
+def write_plugin(meta: PluginMeta, output: str | IO) -> None:
     with zipfile.ZipFile(output, "w") as zip:
         meta_dump = BytesIO()
         yaml.dump(meta.serialize(), meta_dump)
@@ -89,33 +93,47 @@ def write_plugin(meta: PluginMeta, output: Union[str, IO]) -> None:
             if os.path.isfile(f"{module}.py"):
                 zip.write(f"{module}.py")
             elif module is not None and os.path.isdir(module):
+                if os.path.isfile(f"{module}/__init__.py"):
                     zipdir(zip, module)
+                else:
+                    print(
+                        Fore.YELLOW
+                        + f"Module {module} is missing __init__.py, skipping"
+                        + Fore.RESET
+                    )
             else:
                 print(Fore.YELLOW + f"Module {module} not found, skipping" + Fore.RESET)
-        for file in meta.extra_files:
+        for pattern in meta.extra_files:
+            for file in glob.iglob(pattern):
                 zip.write(file)


-def upload_plugin(output: Union[str, IO], server: str) -> None:
+@cliq.with_authenticated_http
+async def upload_plugin(output: str | IO, *, server: str, sess: ClientSession) -> None:
     server, token = get_token(server)
     if not token:
         return
     if isinstance(output, str):
         with open(output, "rb") as file:
-            upload_file(file, server, token)
+            await upload_file(sess, file, server)
     else:
-        upload_file(output, server, token)
+        await upload_file(sess, output, server)


-@app.command(short_help="Build a maubot plugin",
-             help="Build a maubot plugin. First parameter is the path to root of the plugin "
-                  "to build. You can also use --output to specify output file.")
+@app.command(
+    short_help="Build a maubot plugin",
+    help=(
+        "Build a maubot plugin. First parameter is the path to root of the plugin "
+        "to build. You can also use --output to specify output file."
+    ),
+)
 @click.argument("path", default=os.getcwd())
-@click.option("-o", "--output", help="Path to output built plugin to",
-              type=PathValidator.click_type)
-@click.option("-u", "--upload", help="Upload plugin to server after building", is_flag=True,
-              default=False)
+@click.option(
+    "-o", "--output", help="Path to output built plugin to", type=PathValidator.click_type
+)
+@click.option(
+    "-u", "--upload", help="Upload plugin to server after building", is_flag=True, default=False
+)
 @click.option("-s", "--server", help="Server to upload built plugin to")
 def build(path: str, output: str, upload: bool, server: str) -> None:
     meta = read_meta(path)

@@ -134,4 +152,4 @@ def build(path: str, output: str, upload: bool, server: str) -> None:
     else:
         output.seek(0)
     if upload:
-        upload_plugin(output, server)
+        asyncio.run(upload_plugin(output, server=server))
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,11 +13,11 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from pkg_resources import resource_string
 import os

-from packaging.version import Version
 from jinja2 import Template
+from packaging.version import Version
+from pkg_resources import resource_string

 from .. import cliq
 from ..cliq import SPDXValidator, VersionValidator
@@ -40,26 +40,55 @@ def load_templates():


 @cliq.command(help="Initialize a new maubot plugin")
-@cliq.option("-n", "--name", help="The name of the project", required=True,
-             default=os.path.basename(os.getcwd()))
-@cliq.option("-i", "--id", message="ID", required=True,
-             help="The maubot plugin ID (Java package name format)")
-@cliq.option("-v", "--version", help="Initial version for project (PEP-440 format)",
-             default="0.1.0", validator=VersionValidator, required=True)
-@cliq.option("-l", "--license", validator=SPDXValidator, default="AGPL-3.0-or-later",
-             help="The license for the project (SPDX identifier)", required=False)
-@cliq.option("-c", "--config", message="Should the plugin include a config?",
-             help="Include a config in the plugin stub", default=False, is_flag=True)
+@cliq.option(
+    "-n",
+    "--name",
+    help="The name of the project",
+    required=True,
+    default=os.path.basename(os.getcwd()),
+)
+@cliq.option(
+    "-i",
+    "--id",
+    message="ID",
+    required=True,
+    help="The maubot plugin ID (Java package name format)",
+)
+@cliq.option(
+    "-v",
+    "--version",
+    help="Initial version for project (PEP-440 format)",
+    default="0.1.0",
+    validator=VersionValidator,
+    required=True,
+)
+@cliq.option(
+    "-l",
+    "--license",
+    validator=SPDXValidator,
+    default="AGPL-3.0-or-later",
+    help="The license for the project (SPDX identifier)",
+    required=False,
+)
+@cliq.option(
+    "-c",
+    "--config",
+    message="Should the plugin include a config?",
+    help="Include a config in the plugin stub",
+    default=False,
+    is_flag=True,
+)
 def init(name: str, id: str, version: Version, license: str, config: bool) -> None:
     load_templates()
     main_class = name[0].upper() + name[1:]
-    meta = meta_template.render(id=id, version=str(version), license=license, config=config,
-                                main_class=main_class)
+    meta = meta_template.render(
+        id=id, version=str(version), license=license, config=config, main_class=main_class
+    )
     with open("maubot.yaml", "w") as file:
         file.write(meta)
     if license:
         with open("LICENSE", "w") as file:
-            file.write(spdx.get(license)["text"])
+            file.write(spdx.get(license)["licenseText"])
     if not os.path.isdir(name):
         os.mkdir(name)
     mod = mod_template.render(config=config, name=main_class)
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,37 +13,65 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from urllib.request import urlopen
-from urllib.error import HTTPError
 import json
 import os

 from colorama import Fore
+from yarl import URL
+import aiohttp

-from ..config import save_config, config
 from ..cliq import cliq
+from ..config import config, save_config


 @cliq.command(help="Log in to a Maubot instance")
-@cliq.option("-u", "--username", help="The username of your account", default=os.environ.get("USER", None), required=True)
-@cliq.option("-p", "--password", help="The password to your account", inq_type="password", required=True)
-@cliq.option("-s", "--server", help="The server to log in to", default="http://localhost:29316", required=True)
-def login(server, username, password) -> None:
+@cliq.option(
+    "-u",
+    "--username",
+    help="The username of your account",
+    default=os.environ.get("USER", None),
+    required=True,
+)
+@cliq.option(
+    "-p", "--password", help="The password to your account", inq_type="password", required=True
+)
+@cliq.option(
+    "-s",
+    "--server",
+    help="The server to log in to",
+    default="http://localhost:29316",
+    required=True,
+)
+@cliq.option(
+    "-a",
+    "--alias",
+    help="Alias to reference the server without typing the full URL",
+    default="",
+    required=False,
+)
+@cliq.with_http
+async def login(
+    server: str, username: str, password: str, alias: str, sess: aiohttp.ClientSession
+) -> None:
     data = {
         "username": username,
         "password": password,
     }
-    try:
-        with urlopen(f"{server}/_matrix/maubot/v1/auth/login",
-                     data=json.dumps(data).encode("utf-8")) as resp_data:
-            resp = json.load(resp_data)
-            config["servers"][server] = resp["token"]
-            config["default_server"] = server
-            save_config()
-            print(Fore.GREEN + "Logged in successfully")
-    except HTTPError as e:
-        try:
-            err = json.load(e)
-        except json.JSONDecodeError:
-            err = {}
-        print(Fore.RED + err.get("error", str(e)) + Fore.RESET)
+    url = URL(server) / "_matrix/maubot/v1/auth/login"
+    async with sess.post(url, json=data) as resp:
+        if resp.status == 200:
+            data = await resp.json()
+            config["servers"][server] = data["token"]
+            if not config["default_server"]:
+                print(Fore.CYAN, "Setting", server, "as the default server")
+                config["default_server"] = server
+            if alias:
+                config["aliases"][alias] = server
+            save_config()
+            print(Fore.GREEN + "Logged in successfully")
+        else:
+            try:
+                err = (await resp.json())["error"]
+            except (json.JSONDecodeError, KeyError):
+                err = await resp.text()
+            print(Fore.RED + err + Fore.RESET)
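The new `--alias` flag only stores a short name in the CLI config's `aliases` map; the lookup side lives in `get_token` (see the cli/config.py diff further down). A hedged sketch of the intended round trip, assuming maubot is installed so `maubot.cli.config` is importable:

# Illustrative only: mimic what `mbc login --alias prod` stores, then resolve it.
from maubot.cli.config import config, get_token

config["servers"]["https://maubot.example.com"] = "<access token>"  # hypothetical values
config["aliases"]["prod"] = "https://maubot.example.com"

server, token = get_token("prod")
print(server)  # -> https://maubot.example.com
print(token)   # -> <access token>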
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -16,13 +16,14 @@
 from datetime import datetime
 import asyncio

+from aiohttp import ClientSession, WSMessage, WSMsgType
 from colorama import Fore
-from aiohttp import WSMsgType, WSMessage, ClientSession
-from mautrix.client.api.types.util import Obj
 import click

-from ..config import get_token
+from mautrix.types import Obj
+
 from ..base import app
+from ..config import get_token

 history_count: int = 10

@@ -37,19 +38,13 @@ def logs(server: str, tail: int) -> None:
     global history_count
     history_count = tail
     loop = asyncio.get_event_loop()
-    future = asyncio.ensure_future(view_logs(server, token), loop=loop)
-    try:
-        loop.run_until_complete(future)
-    except KeyboardInterrupt:
-        future.cancel()
-        loop.run_until_complete(future)
-    loop.close()
+    loop.run_until_complete(view_logs(server, token))


 def parsedate(entry: Obj) -> None:
     i = entry.time.index("+")
     i = entry.time.index(":", i)
-    entry.time = entry.time[:i] + entry.time[i + 1:]
+    entry.time = entry.time[:i] + entry.time[i + 1 :]
     entry.time = datetime.strptime(entry.time, "%Y-%m-%dT%H:%M:%S.%f%z")

@@ -65,13 +60,16 @@ levelcolors = {
 def print_entry(entry: dict) -> None:
     entry = Obj(**entry)
     parsedate(entry)
-    print("{levelcolor}[{date}] [{level}@{logger}] {message}{resetcolor}"
-          .format(date=entry.time.strftime("%Y-%m-%d %H:%M:%S"),
+    print(
+        "{levelcolor}[{date}] [{level}@{logger}] {message}{resetcolor}".format(
+            date=entry.time.strftime("%Y-%m-%d %H:%M:%S"),
             level=entry.levelname,
             levelcolor=levelcolors.get(entry.levelname, ""),
             resetcolor=Fore.RESET,
             logger=entry.name,
-            message=entry.msg))
+            message=entry.msg,
+        )
+    )
     if entry.exc_info:
         print(entry.exc_info)

@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,45 +13,46 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from urllib.request import urlopen, Request
-from urllib.error import HTTPError
 from typing import IO
 import json

 from colorama import Fore
+from yarl import URL
+import aiohttp
 import click

-from ..base import app
-from ..config import get_default_server, get_token
+from ..cliq import cliq


 class UploadError(Exception):
     pass


-@app.command(help="Upload a maubot plugin")
+@cliq.command(help="Upload a maubot plugin")
 @click.argument("path")
 @click.option("-s", "--server", help="The maubot instance to upload the plugin to")
-def upload(path: str, server: str) -> None:
-    server, token = get_token(server)
-    if not token:
-        return
+@cliq.with_authenticated_http
+async def upload(path: str, server: str, sess: aiohttp.ClientSession) -> None:
     with open(path, "rb") as file:
-        upload_file(file, server, token)
+        await upload_file(sess, file, server)


-def upload_file(file: IO, server: str, token: str) -> None:
-    req = Request(f"{server}/_matrix/maubot/v1/plugins/upload?allow_override=true", data=file,
-                  headers={"Authorization": f"Bearer {token}", "Content-Type": "application/zip"})
-    try:
-        with urlopen(req) as resp_data:
-            resp = json.load(resp_data)
-            print(f"{Fore.GREEN}Plugin {Fore.CYAN}{resp['id']} v{resp['version']}{Fore.GREEN} "
-                  f"uploaded to {Fore.CYAN}{server}{Fore.GREEN} successfully.{Fore.RESET}")
-    except HTTPError as e:
-        try:
-            err = json.load(e)
-        except json.JSONDecodeError:
-            err = {}
-        print(err.get("stacktrace", ""))
-        print(Fore.RED + "Failed to upload plugin: " + err.get("error", str(e)) + Fore.RESET)
+async def upload_file(sess: aiohttp.ClientSession, file: IO, server: str) -> None:
+    url = (URL(server) / "_matrix/maubot/v1/plugins/upload").with_query({"allow_override": "true"})
+    headers = {"Content-Type": "application/zip"}
+    async with sess.post(url, data=file, headers=headers) as resp:
+        if resp.status in (200, 201):
+            data = await resp.json()
+            print(
+                f"{Fore.GREEN}Plugin {Fore.CYAN}{data['id']} v{data['version']}{Fore.GREEN} "
+                f"uploaded to {Fore.CYAN}{server}{Fore.GREEN} successfully.{Fore.RESET}"
+            )
+        else:
+            try:
+                err = await resp.json()
+                if "stacktrace" in err:
+                    print(err["stacktrace"])
+                err = err["error"]
+            except (aiohttp.ContentTypeError, json.JSONDecodeError, KeyError):
+                err = await resp.text()
+            print(f"{Fore.RED}Failed to upload plugin: {err}{Fore.RESET}")
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@ -13,37 +13,50 @@
|
|||||||
#
|
#
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
from typing import Tuple, Optional, Dict, Any
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import Any
|
||||||
import json
|
import json
|
||||||
import os
|
import os
|
||||||
|
|
||||||
from colorama import Fore
|
from colorama import Fore
|
||||||
|
|
||||||
config: Dict[str, Any] = {
|
config: dict[str, Any] = {
|
||||||
"servers": {},
|
"servers": {},
|
||||||
|
"aliases": {},
|
||||||
"default_server": None,
|
"default_server": None,
|
||||||
}
|
}
|
||||||
configdir = os.environ.get("XDG_CONFIG_HOME", os.path.join(os.environ.get("HOME"), ".config"))
|
configdir = os.environ.get("XDG_CONFIG_HOME", os.path.join(os.environ.get("HOME"), ".config"))
|
||||||
|
|
||||||
|
|
||||||
def get_default_server() -> Tuple[Optional[str], Optional[str]]:
|
def get_default_server() -> tuple[str | None, str | None]:
|
||||||
try:
|
try:
|
||||||
server: Optional[str] = config["default_server"]
|
server: str < None = config["default_server"]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
server = None
|
server = None
|
||||||
if server is None:
|
if server is None:
|
||||||
print(f"{Fore.RED}Default server not configured.{Fore.RESET}")
|
print(f"{Fore.RED}Default server not configured.{Fore.RESET}")
|
||||||
|
print(f"Perhaps you forgot to {Fore.CYAN}mbc login{Fore.RESET}?")
|
||||||
return None, None
|
return None, None
|
||||||
return server, _get_token(server)
|
return server, _get_token(server)
|
||||||
|
|
||||||
|
|
||||||
def get_token(server: str) -> Tuple[Optional[str], Optional[str]]:
|
def get_token(server: str) -> tuple[str | None, str | None]:
|
||||||
if not server:
|
if not server:
|
||||||
return get_default_server()
|
return get_default_server()
|
||||||
|
if server in config["aliases"]:
|
||||||
|
server = config["aliases"][server]
|
||||||
return server, _get_token(server)
|
return server, _get_token(server)
|
||||||
|
|
||||||
|
|
||||||
def _get_token(server: str) -> Optional[str]:
|
def _resolve_alias(alias: str) -> str | None:
|
||||||
|
try:
|
||||||
|
return config["aliases"][alias]
|
||||||
|
except KeyError:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _get_token(server: str) -> str | None:
|
||||||
try:
|
try:
|
||||||
return config["servers"][server]
|
return config["servers"][server]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
@ -60,7 +73,8 @@ def load_config() -> None:
|
|||||||
try:
|
try:
|
||||||
with open(f"{configdir}/maubot-cli.json") as file:
|
with open(f"{configdir}/maubot-cli.json") as file:
|
||||||
loaded = json.load(file)
|
loaded = json.load(file)
|
||||||
config["servers"] = loaded["servers"]
|
config["servers"] = loaded.get("servers", {})
|
||||||
config["default_server"] = loaded["default_server"]
|
config["aliases"] = loaded.get("aliases", {})
|
||||||
|
config["default_server"] = loaded.get("default_server", None)
|
||||||
except FileNotFoundError:
|
except FileNotFoundError:
|
||||||
pass
|
pass
|
||||||
|
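To make the new alias handling in the CLI config easier to follow, here is a small illustrative sketch (not part of the diff itself): it mirrors the lookup order shown above, and the server URL, alias and token values are invented placeholders.

from __future__ import annotations

# Illustrative sketch of the alias-aware server/token lookup added above.
# All values below are placeholders; the real data lives in maubot-cli.json.
config = {
    "servers": {"https://matrix.example.com": "example-access-token"},
    "aliases": {"example": "https://matrix.example.com"},
    "default_server": "https://matrix.example.com",
}

def get_token(server: str) -> tuple[str | None, str | None]:
    if not server:
        server = config["default_server"]          # get_default_server() in the real code
    if server in config["aliases"]:
        server = config["aliases"][server]         # the new alias resolution step
    return server, config["servers"].get(server)   # _get_token() in the real code

print(get_token("example"))
# -> ('https://matrix.example.com', 'example-access-token')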
Binary file not shown.
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -13,12 +13,14 @@
|
|||||||
#
|
#
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
from typing import Dict
|
from __future__ import annotations
|
||||||
import zipfile
|
|
||||||
import pkg_resources
|
|
||||||
import json
|
|
||||||
|
|
||||||
spdx_list = None
|
import json
|
||||||
|
import zipfile
|
||||||
|
|
||||||
|
import pkg_resources
|
||||||
|
|
||||||
|
spdx_list: dict[str, dict[str, str]] | None = None
|
||||||
|
|
||||||
|
|
||||||
def load() -> None:
|
def load() -> None:
|
||||||
@ -31,13 +33,13 @@ def load() -> None:
|
|||||||
spdx_list = json.load(file)
|
spdx_list = json.load(file)
|
||||||
|
|
||||||
|
|
||||||
def get(id: str) -> Dict[str, str]:
|
def get(id: str) -> dict[str, str]:
|
||||||
if not spdx_list:
|
if not spdx_list:
|
||||||
load()
|
load()
|
||||||
return spdx_list[id.lower()]
|
return spdx_list[id]
|
||||||
|
|
||||||
|
|
||||||
def valid(id: str) -> bool:
|
def valid(id: str) -> bool:
|
||||||
if not spdx_list:
|
if not spdx_list:
|
||||||
load()
|
load()
|
||||||
return id.lower() in spdx_list
|
return id in spdx_list
|
||||||
|
maubot/client.py (506 changed lines)
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -13,69 +13,203 @@
|
|||||||
#
|
#
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
from typing import Dict, Iterable, Optional, Set, Callable, Any, Awaitable, TYPE_CHECKING
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING, Any, AsyncGenerator, Awaitable, Callable, cast
|
||||||
|
from collections import defaultdict
|
||||||
import asyncio
|
import asyncio
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from aiohttp import ClientSession
|
from aiohttp import ClientSession
|
||||||
|
|
||||||
from mautrix.errors import MatrixInvalidToken, MatrixRequestError
|
|
||||||
from mautrix.types import (UserID, SyncToken, FilterID, ContentURI, StrippedStateEvent, Membership,
|
|
||||||
StateEvent, EventType, Filter, RoomFilter, RoomEventFilter)
|
|
||||||
from mautrix.client import InternalEventType
|
from mautrix.client import InternalEventType
|
||||||
|
from mautrix.errors import MatrixInvalidToken
|
||||||
|
from mautrix.types import (
|
||||||
|
ContentURI,
|
||||||
|
DeviceID,
|
||||||
|
EventFilter,
|
||||||
|
EventType,
|
||||||
|
Filter,
|
||||||
|
FilterID,
|
||||||
|
Membership,
|
||||||
|
PresenceState,
|
||||||
|
RoomEventFilter,
|
||||||
|
RoomFilter,
|
||||||
|
StateEvent,
|
||||||
|
StateFilter,
|
||||||
|
StrippedStateEvent,
|
||||||
|
SyncToken,
|
||||||
|
UserID,
|
||||||
|
)
|
||||||
|
from mautrix.util import background_task
|
||||||
|
from mautrix.util.async_getter_lock import async_getter_lock
|
||||||
|
from mautrix.util.logging import TraceLogger
|
||||||
|
|
||||||
from .lib.store_proxy import ClientStoreProxy
|
from .db import Client as DBClient
|
||||||
from .db import DBClient
|
|
||||||
from .matrix import MaubotMatrixClient
|
from .matrix import MaubotMatrixClient
|
||||||
|
|
||||||
|
try:
|
||||||
|
from mautrix.crypto import OlmMachine, PgCryptoStore
|
||||||
|
|
||||||
|
crypto_import_error = None
|
||||||
|
except ImportError as e:
|
||||||
|
OlmMachine = PgCryptoStore = None
|
||||||
|
crypto_import_error = e
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
|
from .__main__ import Maubot
|
||||||
from .instance import PluginInstance
|
from .instance import PluginInstance
|
||||||
|
|
||||||
log = logging.getLogger("maubot.client")
|
|
||||||
|
|
||||||
|
class Client(DBClient):
|
||||||
|
maubot: "Maubot" = None
|
||||||
|
cache: dict[UserID, Client] = {}
|
||||||
|
_async_get_locks: dict[Any, asyncio.Lock] = defaultdict(lambda: asyncio.Lock())
|
||||||
|
log: TraceLogger = logging.getLogger("maubot.client")
|
||||||
|
|
||||||
class Client:
|
|
||||||
log: logging.Logger = None
|
|
||||||
loop: asyncio.AbstractEventLoop = None
|
|
||||||
cache: Dict[UserID, 'Client'] = {}
|
|
||||||
http_client: ClientSession = None
|
http_client: ClientSession = None
|
||||||
|
|
||||||
references: Set['PluginInstance']
|
references: set[PluginInstance]
|
||||||
db_instance: DBClient
|
|
||||||
client: MaubotMatrixClient
|
client: MaubotMatrixClient
|
||||||
|
crypto: OlmMachine | None
|
||||||
|
crypto_store: PgCryptoStore | None
|
||||||
started: bool
|
started: bool
|
||||||
|
sync_ok: bool
|
||||||
|
|
||||||
remote_displayname: Optional[str]
|
remote_displayname: str | None
|
||||||
remote_avatar_url: Optional[ContentURI]
|
remote_avatar_url: ContentURI | None
|
||||||
|
|
||||||
def __init__(self, db_instance: DBClient) -> None:
|
def __init__(
|
||||||
self.db_instance = db_instance
|
self,
|
||||||
|
id: UserID,
|
||||||
|
homeserver: str,
|
||||||
|
access_token: str,
|
||||||
|
device_id: DeviceID,
|
||||||
|
enabled: bool = False,
|
||||||
|
next_batch: SyncToken = "",
|
||||||
|
filter_id: FilterID = "",
|
||||||
|
sync: bool = True,
|
||||||
|
autojoin: bool = True,
|
||||||
|
online: bool = True,
|
||||||
|
displayname: str = "disable",
|
||||||
|
avatar_url: str = "disable",
|
||||||
|
) -> None:
|
||||||
|
super().__init__(
|
||||||
|
id=id,
|
||||||
|
homeserver=homeserver,
|
||||||
|
access_token=access_token,
|
||||||
|
device_id=device_id,
|
||||||
|
enabled=bool(enabled),
|
||||||
|
next_batch=next_batch,
|
||||||
|
filter_id=filter_id,
|
||||||
|
sync=bool(sync),
|
||||||
|
autojoin=bool(autojoin),
|
||||||
|
online=bool(online),
|
||||||
|
displayname=displayname,
|
||||||
|
avatar_url=avatar_url,
|
||||||
|
)
|
||||||
|
self._postinited = False
|
||||||
|
|
||||||
|
def __hash__(self) -> int:
|
||||||
|
return hash(self.id)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def init_cls(cls, maubot: "Maubot") -> None:
|
||||||
|
cls.maubot = maubot
|
||||||
|
|
||||||
|
def _make_client(
|
||||||
|
self, homeserver: str | None = None, token: str | None = None, device_id: str | None = None
|
||||||
|
) -> MaubotMatrixClient:
|
||||||
|
return MaubotMatrixClient(
|
||||||
|
mxid=self.id,
|
||||||
|
base_url=homeserver or self.homeserver,
|
||||||
|
token=token or self.access_token,
|
||||||
|
client_session=self.http_client,
|
||||||
|
log=self.log,
|
||||||
|
crypto_log=self.log.getChild("crypto"),
|
||||||
|
loop=self.maubot.loop,
|
||||||
|
device_id=device_id or self.device_id,
|
||||||
|
sync_store=self,
|
||||||
|
state_store=self.maubot.state_store,
|
||||||
|
)
|
||||||
|
|
||||||
|
def postinit(self) -> None:
|
||||||
|
if self._postinited:
|
||||||
|
raise RuntimeError("postinit() called twice")
|
||||||
|
self._postinited = True
|
||||||
self.cache[self.id] = self
|
self.cache[self.id] = self
|
||||||
self.log = log.getChild(self.id)
|
self.log = self.log.getChild(self.id)
|
||||||
|
self.http_client = ClientSession(loop=self.maubot.loop)
|
||||||
self.references = set()
|
self.references = set()
|
||||||
self.started = False
|
self.started = False
|
||||||
self.sync_ok = True
|
self.sync_ok = True
|
||||||
self.remote_displayname = None
|
self.remote_displayname = None
|
||||||
self.remote_avatar_url = None
|
self.remote_avatar_url = None
|
||||||
self.client = MaubotMatrixClient(mxid=self.id, base_url=self.homeserver,
|
self.client = self._make_client()
|
||||||
token=self.access_token, client_session=self.http_client,
|
if self.enable_crypto:
|
||||||
log=self.log, loop=self.loop,
|
self._prepare_crypto()
|
||||||
store=ClientStoreProxy(self.db_instance))
|
else:
|
||||||
|
self.crypto_store = None
|
||||||
|
self.crypto = None
|
||||||
self.client.ignore_initial_sync = True
|
self.client.ignore_initial_sync = True
|
||||||
self.client.ignore_first_sync = True
|
self.client.ignore_first_sync = True
|
||||||
|
self.client.presence = PresenceState.ONLINE if self.online else PresenceState.OFFLINE
|
||||||
if self.autojoin:
|
if self.autojoin:
|
||||||
self.client.add_event_handler(EventType.ROOM_MEMBER, self._handle_invite)
|
self.client.add_event_handler(EventType.ROOM_MEMBER, self._handle_invite)
|
||||||
self.client.add_event_handler(EventType.ROOM_TOMBSTONE, self._handle_tombstone)
|
self.client.add_event_handler(EventType.ROOM_TOMBSTONE, self._handle_tombstone)
|
||||||
self.client.add_event_handler(InternalEventType.SYNC_ERRORED, self._set_sync_ok(False))
|
self.client.add_event_handler(InternalEventType.SYNC_ERRORED, self._set_sync_ok(False))
|
||||||
self.client.add_event_handler(InternalEventType.SYNC_SUCCESSFUL, self._set_sync_ok(True))
|
self.client.add_event_handler(InternalEventType.SYNC_SUCCESSFUL, self._set_sync_ok(True))
|
||||||
|
|
||||||
def _set_sync_ok(self, ok: bool) -> Callable[[Dict[str, Any]], Awaitable[None]]:
|
def _set_sync_ok(self, ok: bool) -> Callable[[dict[str, Any]], Awaitable[None]]:
|
||||||
async def handler(data: Dict[str, Any]) -> None:
|
async def handler(data: dict[str, Any]) -> None:
|
||||||
self.sync_ok = ok
|
self.sync_ok = ok
|
||||||
|
|
||||||
return handler
|
return handler
|
||||||
|
|
||||||
async def start(self, try_n: Optional[int] = 0) -> None:
|
@property
|
||||||
|
def enable_crypto(self) -> bool:
|
||||||
|
if not self.device_id:
|
||||||
|
return False
|
||||||
|
elif not OlmMachine:
|
||||||
|
global crypto_import_error
|
||||||
|
self.log.warning(
|
||||||
|
"Client has device ID, but encryption dependencies not installed",
|
||||||
|
exc_info=crypto_import_error,
|
||||||
|
)
|
||||||
|
# Clear the stack trace after it's logged once to avoid spamming logs
|
||||||
|
crypto_import_error = None
|
||||||
|
return False
|
||||||
|
elif not self.maubot.crypto_db:
|
||||||
|
self.log.warning("Client has device ID, but crypto database is not prepared")
|
||||||
|
return False
|
||||||
|
return True
|
||||||
|
|
||||||
|
def _prepare_crypto(self) -> None:
|
||||||
|
self.crypto_store = PgCryptoStore(
|
||||||
|
account_id=self.id, pickle_key="mau.crypto", db=self.maubot.crypto_db
|
||||||
|
)
|
||||||
|
self.crypto = OlmMachine(
|
||||||
|
self.client,
|
||||||
|
self.crypto_store,
|
||||||
|
self.maubot.state_store,
|
||||||
|
log=self.client.crypto_log,
|
||||||
|
)
|
||||||
|
self.client.crypto = self.crypto
|
||||||
|
|
||||||
|
def _remove_crypto_event_handlers(self) -> None:
|
||||||
|
if not self.crypto:
|
||||||
|
return
|
||||||
|
handlers = [
|
||||||
|
(InternalEventType.DEVICE_OTK_COUNT, self.crypto.handle_otk_count),
|
||||||
|
(InternalEventType.DEVICE_LISTS, self.crypto.handle_device_lists),
|
||||||
|
(EventType.TO_DEVICE_ENCRYPTED, self.crypto.handle_to_device_event),
|
||||||
|
(EventType.ROOM_KEY_REQUEST, self.crypto.handle_room_key_request),
|
||||||
|
(EventType.ROOM_MEMBER, self.crypto.handle_member_event),
|
||||||
|
]
|
||||||
|
for event_type, func in handlers:
|
||||||
|
self.client.remove_event_handler(event_type, func)
|
||||||
|
|
||||||
|
async def start(self, try_n: int | None = 0) -> None:
|
||||||
try:
|
try:
|
||||||
if try_n > 0:
|
if try_n > 0:
|
||||||
await asyncio.sleep(try_n * 10)
|
await asyncio.sleep(try_n * 10)
|
||||||
@ -83,7 +217,21 @@ class Client:
|
|||||||
except Exception:
|
except Exception:
|
||||||
self.log.exception("Failed to start")
|
self.log.exception("Failed to start")
|
||||||
|
|
||||||
async def _start(self, try_n: Optional[int] = 0) -> None:
|
async def _start_crypto(self) -> None:
|
||||||
|
self.log.debug("Enabling end-to-end encryption support")
|
||||||
|
await self.crypto_store.open()
|
||||||
|
crypto_device_id = await self.crypto_store.get_device_id()
|
||||||
|
if crypto_device_id and crypto_device_id != self.device_id:
|
||||||
|
self.log.warning(
|
||||||
|
"Mismatching device ID in crypto store and main database, resetting encryption"
|
||||||
|
)
|
||||||
|
await self.crypto_store.delete()
|
||||||
|
crypto_device_id = None
|
||||||
|
await self.crypto.load()
|
||||||
|
if not crypto_device_id:
|
||||||
|
await self.crypto_store.put_device_id(self.device_id)
|
||||||
|
|
||||||
|
async def _start(self, try_n: int | None = 0) -> None:
|
||||||
if not self.enabled:
|
if not self.enabled:
|
||||||
self.log.debug("Not starting disabled client")
|
self.log.debug("Not starting disabled client")
|
||||||
return
|
return
|
||||||
@ -91,36 +239,60 @@ class Client:
|
|||||||
self.log.warning("Ignoring start() call to started client")
|
self.log.warning("Ignoring start() call to started client")
|
||||||
return
|
return
|
||||||
try:
|
try:
|
||||||
user_id = await self.client.whoami()
|
await self.client.versions()
|
||||||
|
whoami = await self.client.whoami()
|
||||||
except MatrixInvalidToken as e:
|
except MatrixInvalidToken as e:
|
||||||
self.log.error(f"Invalid token: {e}. Disabling client")
|
self.log.error(f"Invalid token: {e}. Disabling client")
|
||||||
self.db_instance.enabled = False
|
self.enabled = False
|
||||||
|
await self.update()
|
||||||
return
|
return
|
||||||
except MatrixRequestError:
|
except Exception as e:
|
||||||
if try_n >= 5:
|
if try_n >= 8:
|
||||||
self.log.exception("Failed to get /account/whoami, disabling client")
|
self.log.exception("Failed to get /account/whoami, disabling client")
|
||||||
self.db_instance.enabled = False
|
self.enabled = False
|
||||||
|
await self.update()
|
||||||
else:
|
else:
|
||||||
self.log.exception(f"Failed to get /account/whoami, "
|
self.log.warning(
|
||||||
f"retrying in {(try_n + 1) * 10}s")
|
f"Failed to get /account/whoami, retrying in {(try_n + 1) * 10}s: {e}"
|
||||||
_ = asyncio.ensure_future(self.start(try_n + 1), loop=self.loop)
|
)
|
||||||
|
background_task.create(self.start(try_n + 1))
|
||||||
return
|
return
|
||||||
if user_id != self.id:
|
if whoami.user_id != self.id:
|
||||||
self.log.error(f"User ID mismatch: expected {self.id}, but got {user_id}")
|
self.log.error(f"User ID mismatch: expected {self.id}, but got {whoami.user_id}")
|
||||||
self.db_instance.enabled = False
|
self.enabled = False
|
||||||
|
await self.update()
|
||||||
|
return
|
||||||
|
elif whoami.device_id and self.device_id and whoami.device_id != self.device_id:
|
||||||
|
self.log.error(
|
||||||
|
f"Device ID mismatch: expected {self.device_id}, but got {whoami.device_id}"
|
||||||
|
)
|
||||||
|
self.enabled = False
|
||||||
|
await self.update()
|
||||||
return
|
return
|
||||||
if not self.filter_id:
|
if not self.filter_id:
|
||||||
self.db_instance.edit(filter_id=await self.client.create_filter(Filter(
|
self.filter_id = await self.client.create_filter(
|
||||||
|
Filter(
|
||||||
room=RoomFilter(
|
room=RoomFilter(
|
||||||
timeline=RoomEventFilter(
|
timeline=RoomEventFilter(
|
||||||
limit=50,
|
limit=50,
|
||||||
|
lazy_load_members=True,
|
||||||
|
),
|
||||||
|
state=StateFilter(
|
||||||
|
lazy_load_members=True,
|
||||||
),
|
),
|
||||||
),
|
),
|
||||||
)))
|
presence=EventFilter(
|
||||||
|
not_types=[EventType.PRESENCE],
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
await self.update()
|
||||||
if self.displayname != "disable":
|
if self.displayname != "disable":
|
||||||
await self.client.set_displayname(self.displayname)
|
await self.client.set_displayname(self.displayname)
|
||||||
if self.avatar_url != "disable":
|
if self.avatar_url != "disable":
|
||||||
await self.client.set_avatar_url(self.avatar_url)
|
await self.client.set_avatar_url(self.avatar_url)
|
||||||
|
if self.crypto:
|
||||||
|
await self._start_crypto()
|
||||||
self.start_sync()
|
self.start_sync()
|
||||||
await self._update_remote_profile()
|
await self._update_remote_profile()
|
||||||
self.started = True
|
self.started = True
|
||||||
@ -128,11 +300,10 @@ class Client:
|
|||||||
await self.start_plugins()
|
await self.start_plugins()
|
||||||
|
|
||||||
async def start_plugins(self) -> None:
|
async def start_plugins(self) -> None:
|
||||||
await asyncio.gather(*[plugin.start() for plugin in self.references], loop=self.loop)
|
await asyncio.gather(*[plugin.start() for plugin in self.references])
|
||||||
|
|
||||||
async def stop_plugins(self) -> None:
|
async def stop_plugins(self) -> None:
|
||||||
await asyncio.gather(*[plugin.stop() for plugin in self.references if plugin.started],
|
await asyncio.gather(*[plugin.stop() for plugin in self.references if plugin.started])
|
||||||
loop=self.loop)
|
|
||||||
|
|
||||||
def start_sync(self) -> None:
|
def start_sync(self) -> None:
|
||||||
if self.sync:
|
if self.sync:
|
||||||
@ -146,29 +317,31 @@ class Client:
|
|||||||
self.started = False
|
self.started = False
|
||||||
await self.stop_plugins()
|
await self.stop_plugins()
|
||||||
self.stop_sync()
|
self.stop_sync()
|
||||||
|
if self.crypto:
|
||||||
|
await self.crypto_store.close()
|
||||||
|
|
||||||
def clear_cache(self) -> None:
|
async def clear_cache(self) -> None:
|
||||||
self.stop_sync()
|
self.stop_sync()
|
||||||
self.db_instance.edit(filter_id="", next_batch="")
|
self.filter_id = FilterID("")
|
||||||
|
self.next_batch = SyncToken("")
|
||||||
|
await self.update()
|
||||||
self.start_sync()
|
self.start_sync()
|
||||||
|
|
||||||
def delete(self) -> None:
|
|
||||||
try:
|
|
||||||
del self.cache[self.id]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
self.db_instance.delete()
|
|
||||||
|
|
||||||
def to_dict(self) -> dict:
|
def to_dict(self) -> dict:
|
||||||
return {
|
return {
|
||||||
"id": self.id,
|
"id": self.id,
|
||||||
"homeserver": self.homeserver,
|
"homeserver": self.homeserver,
|
||||||
"access_token": self.access_token,
|
"access_token": self.access_token,
|
||||||
|
"device_id": self.device_id,
|
||||||
|
"fingerprint": (
|
||||||
|
self.crypto.account.fingerprint if self.crypto and self.crypto.account else None
|
||||||
|
),
|
||||||
"enabled": self.enabled,
|
"enabled": self.enabled,
|
||||||
"started": self.started,
|
"started": self.started,
|
||||||
"sync": self.sync,
|
"sync": self.sync,
|
||||||
"sync_ok": self.sync_ok,
|
"sync_ok": self.sync_ok,
|
||||||
"autojoin": self.autojoin,
|
"autojoin": self.autojoin,
|
||||||
|
"online": self.online,
|
||||||
"displayname": self.displayname,
|
"displayname": self.displayname,
|
||||||
"avatar_url": self.avatar_url,
|
"avatar_url": self.avatar_url,
|
||||||
"remote_displayname": self.remote_displayname,
|
"remote_displayname": self.remote_displayname,
|
||||||
@ -176,20 +349,6 @@ class Client:
|
|||||||
"instances": [instance.to_dict() for instance in self.references],
|
"instances": [instance.to_dict() for instance in self.references],
|
||||||
}
|
}
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get(cls, user_id: UserID, db_instance: Optional[DBClient] = None) -> Optional['Client']:
|
|
||||||
try:
|
|
||||||
return cls.cache[user_id]
|
|
||||||
except KeyError:
|
|
||||||
db_instance = db_instance or DBClient.get(user_id)
|
|
||||||
if not db_instance:
|
|
||||||
return None
|
|
||||||
return Client(db_instance)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def all(cls) -> Iterable['Client']:
|
|
||||||
return (cls.get(user.id, user) for user in DBClient.all())
|
|
||||||
|
|
||||||
async def _handle_tombstone(self, evt: StateEvent) -> None:
|
async def _handle_tombstone(self, evt: StateEvent) -> None:
|
||||||
if not evt.content.replacement_room:
|
if not evt.content.replacement_room:
|
||||||
self.log.info(f"{evt.room_id} tombstoned with no replacement, ignoring")
|
self.log.info(f"{evt.room_id} tombstoned with no replacement, ignoring")
|
||||||
@ -201,7 +360,7 @@ class Client:
|
|||||||
if evt.state_key == self.id and evt.content.membership == Membership.INVITE:
|
if evt.state_key == self.id and evt.content.membership == Membership.INVITE:
|
||||||
await self.client.join_room(evt.room_id)
|
await self.client.join_room(evt.room_id)
|
||||||
|
|
||||||
async def update_started(self, started: bool) -> None:
|
async def update_started(self, started: bool | None) -> None:
|
||||||
if started is None or started == self.started:
|
if started is None or started == self.started:
|
||||||
return
|
return
|
||||||
if started:
|
if started:
|
||||||
@ -209,117 +368,162 @@ class Client:
|
|||||||
else:
|
else:
|
||||||
await self.stop()
|
await self.stop()
|
||||||
|
|
||||||
async def update_displayname(self, displayname: str) -> None:
|
async def update_enabled(self, enabled: bool | None, save: bool = True) -> None:
|
||||||
|
if enabled is None or enabled == self.enabled:
|
||||||
|
return
|
||||||
|
self.enabled = enabled
|
||||||
|
if save:
|
||||||
|
await self.update()
|
||||||
|
|
||||||
|
async def update_displayname(self, displayname: str | None, save: bool = True) -> None:
|
||||||
if displayname is None or displayname == self.displayname:
|
if displayname is None or displayname == self.displayname:
|
||||||
return
|
return
|
||||||
self.db_instance.displayname = displayname
|
self.displayname = displayname
|
||||||
if self.displayname != "disable":
|
if self.displayname != "disable":
|
||||||
await self.client.set_displayname(self.displayname)
|
await self.client.set_displayname(self.displayname)
|
||||||
else:
|
else:
|
||||||
await self._update_remote_profile()
|
await self._update_remote_profile()
|
||||||
|
if save:
|
||||||
|
await self.update()
|
||||||
|
|
||||||
async def update_avatar_url(self, avatar_url: ContentURI) -> None:
|
async def update_avatar_url(self, avatar_url: ContentURI, save: bool = True) -> None:
|
||||||
if avatar_url is None or avatar_url == self.avatar_url:
|
if avatar_url is None or avatar_url == self.avatar_url:
|
||||||
return
|
return
|
||||||
self.db_instance.avatar_url = avatar_url
|
self.avatar_url = avatar_url
|
||||||
if self.avatar_url != "disable":
|
if self.avatar_url != "disable":
|
||||||
await self.client.set_avatar_url(self.avatar_url)
|
await self.client.set_avatar_url(self.avatar_url)
|
||||||
else:
|
else:
|
||||||
await self._update_remote_profile()
|
await self._update_remote_profile()
|
||||||
|
if save:
|
||||||
|
await self.update()
|
||||||
|
|
||||||
async def update_access_details(self, access_token: str, homeserver: str) -> None:
|
async def update_sync(self, sync: bool | None, save: bool = True) -> None:
|
||||||
|
if sync is None or self.sync == sync:
|
||||||
|
return
|
||||||
|
self.sync = sync
|
||||||
|
if self.started:
|
||||||
|
if sync:
|
||||||
|
self.start_sync()
|
||||||
|
else:
|
||||||
|
self.stop_sync()
|
||||||
|
if save:
|
||||||
|
await self.update()
|
||||||
|
|
||||||
|
async def update_autojoin(self, autojoin: bool | None, save: bool = True) -> None:
|
||||||
|
if autojoin is None or autojoin == self.autojoin:
|
||||||
|
return
|
||||||
|
if autojoin:
|
||||||
|
self.client.add_event_handler(EventType.ROOM_MEMBER, self._handle_invite)
|
||||||
|
else:
|
||||||
|
self.client.remove_event_handler(EventType.ROOM_MEMBER, self._handle_invite)
|
||||||
|
self.autojoin = autojoin
|
||||||
|
if save:
|
||||||
|
await self.update()
|
||||||
|
|
||||||
|
async def update_online(self, online: bool | None, save: bool = True) -> None:
|
||||||
|
if online is None or online == self.online:
|
||||||
|
return
|
||||||
|
self.client.presence = PresenceState.ONLINE if online else PresenceState.OFFLINE
|
||||||
|
self.online = online
|
||||||
|
if save:
|
||||||
|
await self.update()
|
||||||
|
|
||||||
|
async def update_access_details(
|
||||||
|
self,
|
||||||
|
access_token: str | None,
|
||||||
|
homeserver: str | None,
|
||||||
|
device_id: str | None = None,
|
||||||
|
) -> None:
|
||||||
if not access_token and not homeserver:
|
if not access_token and not homeserver:
|
||||||
return
|
return
|
||||||
elif access_token == self.access_token and homeserver == self.homeserver:
|
if device_id is None:
|
||||||
|
device_id = self.device_id
|
||||||
|
elif not device_id:
|
||||||
|
device_id = None
|
||||||
|
if (
|
||||||
|
access_token == self.access_token
|
||||||
|
and homeserver == self.homeserver
|
||||||
|
and device_id == self.device_id
|
||||||
|
):
|
||||||
return
|
return
|
||||||
new_client = MaubotMatrixClient(mxid=self.id, base_url=homeserver or self.homeserver,
|
new_client = self._make_client(homeserver, access_token, device_id)
|
||||||
token=access_token or self.access_token, loop=self.loop,
|
whoami = await new_client.whoami()
|
||||||
client_session=self.http_client, log=self.log)
|
if whoami.user_id != self.id:
|
||||||
mxid = await new_client.whoami()
|
raise ValueError(f"MXID mismatch: {whoami.user_id}")
|
||||||
if mxid != self.id:
|
elif whoami.device_id and device_id and whoami.device_id != device_id:
|
||||||
raise ValueError(f"MXID mismatch: {mxid}")
|
raise ValueError(f"Device ID mismatch: {whoami.device_id}")
|
||||||
new_client.store = self.db_instance
|
new_client.sync_store = self
|
||||||
self.stop_sync()
|
self.stop_sync()
|
||||||
|
|
||||||
|
# TODO this event handler transfer is pretty hacky
|
||||||
|
self._remove_crypto_event_handlers()
|
||||||
|
self.client.crypto = None
|
||||||
|
new_client.event_handlers = self.client.event_handlers
|
||||||
|
new_client.global_event_handlers = self.client.global_event_handlers
|
||||||
|
|
||||||
self.client = new_client
|
self.client = new_client
|
||||||
self.db_instance.homeserver = homeserver
|
self.homeserver = homeserver
|
||||||
self.db_instance.access_token = access_token
|
self.access_token = access_token
|
||||||
|
self.device_id = device_id
|
||||||
|
if self.enable_crypto:
|
||||||
|
self._prepare_crypto()
|
||||||
|
await self._start_crypto()
|
||||||
|
else:
|
||||||
|
self.crypto_store = None
|
||||||
|
self.crypto = None
|
||||||
self.start_sync()
|
self.start_sync()
|
||||||
|
|
||||||
async def _update_remote_profile(self) -> None:
|
async def _update_remote_profile(self) -> None:
|
||||||
profile = await self.client.get_profile(self.id)
|
profile = await self.client.get_profile(self.id)
|
||||||
self.remote_displayname, self.remote_avatar_url = profile.displayname, profile.avatar_url
|
self.remote_displayname, self.remote_avatar_url = profile.displayname, profile.avatar_url
|
||||||
|
|
||||||
# region Properties
|
async def delete(self) -> None:
|
||||||
|
try:
|
||||||
|
del self.cache[self.id]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
await super().delete()
|
||||||
|
|
||||||
@property
|
@classmethod
|
||||||
def id(self) -> UserID:
|
@async_getter_lock
|
||||||
return self.db_instance.id
|
async def get(
|
||||||
|
cls,
|
||||||
|
user_id: UserID,
|
||||||
|
*,
|
||||||
|
homeserver: str | None = None,
|
||||||
|
access_token: str | None = None,
|
||||||
|
device_id: DeviceID | None = None,
|
||||||
|
) -> Client | None:
|
||||||
|
try:
|
||||||
|
return cls.cache[user_id]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
|
||||||
@property
|
user = cast(cls, await super().get(user_id))
|
||||||
def homeserver(self) -> str:
|
if user is not None:
|
||||||
return self.db_instance.homeserver
|
user.postinit()
|
||||||
|
return user
|
||||||
|
|
||||||
@property
|
if homeserver and access_token:
|
||||||
def access_token(self) -> str:
|
user = cls(
|
||||||
return self.db_instance.access_token
|
user_id,
|
||||||
|
homeserver=homeserver,
|
||||||
|
access_token=access_token,
|
||||||
|
device_id=device_id or "",
|
||||||
|
)
|
||||||
|
await user.insert()
|
||||||
|
user.postinit()
|
||||||
|
return user
|
||||||
|
|
||||||
@property
|
return None
|
||||||
def enabled(self) -> bool:
|
|
||||||
return self.db_instance.enabled
|
|
||||||
|
|
||||||
@enabled.setter
|
@classmethod
|
||||||
def enabled(self, value: bool) -> None:
|
async def all(cls) -> AsyncGenerator[Client, None]:
|
||||||
self.db_instance.enabled = value
|
users = await super().all()
|
||||||
|
user: cls
|
||||||
@property
|
for user in users:
|
||||||
def next_batch(self) -> SyncToken:
|
try:
|
||||||
return self.db_instance.next_batch
|
yield cls.cache[user.id]
|
||||||
|
except KeyError:
|
||||||
@property
|
user.postinit()
|
||||||
def filter_id(self) -> FilterID:
|
yield user
|
||||||
return self.db_instance.filter_id
|
|
||||||
|
|
||||||
@property
|
|
||||||
def sync(self) -> bool:
|
|
||||||
return self.db_instance.sync
|
|
||||||
|
|
||||||
@sync.setter
|
|
||||||
def sync(self, value: bool) -> None:
|
|
||||||
if value == self.db_instance.sync:
|
|
||||||
return
|
|
||||||
self.db_instance.sync = value
|
|
||||||
if self.started:
|
|
||||||
if value:
|
|
||||||
self.start_sync()
|
|
||||||
else:
|
|
||||||
self.stop_sync()
|
|
||||||
|
|
||||||
@property
|
|
||||||
def autojoin(self) -> bool:
|
|
||||||
return self.db_instance.autojoin
|
|
||||||
|
|
||||||
@autojoin.setter
|
|
||||||
def autojoin(self, value: bool) -> None:
|
|
||||||
if value == self.db_instance.autojoin:
|
|
||||||
return
|
|
||||||
if value:
|
|
||||||
self.client.add_event_handler(EventType.ROOM_MEMBER, self._handle_invite)
|
|
||||||
else:
|
|
||||||
self.client.remove_event_handler(EventType.ROOM_MEMBER, self._handle_invite)
|
|
||||||
self.db_instance.autojoin = value
|
|
||||||
|
|
||||||
@property
|
|
||||||
def displayname(self) -> str:
|
|
||||||
return self.db_instance.displayname
|
|
||||||
|
|
||||||
@property
|
|
||||||
def avatar_url(self) -> ContentURI:
|
|
||||||
return self.db_instance.avatar_url
|
|
||||||
|
|
||||||
# endregion
|
|
||||||
|
|
||||||
|
|
||||||
def init(loop: asyncio.AbstractEventLoop) -> Iterable[Client]:
|
|
||||||
Client.http_client = ClientSession(loop=loop)
|
|
||||||
Client.loop = loop
|
|
||||||
return Client.all()
|
|
||||||
|
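Since maubot/client.py is the largest part of this change, a rough usage sketch may help. It only uses methods visible in the diff above (init_cls, get, start); the MXID, homeserver and token are placeholders, and how the real Maubot core wires these calls together is not shown in this excerpt.

from maubot.client import Client

# Illustrative only: roughly how the new async Client class is driven.
async def example(maubot) -> None:  # "maubot" is the core Maubot instance (type not shown here)
    Client.init_cls(maubot)  # attach the core instance to the class once at startup
    client = await Client.get(
        "@bot:example.com",                       # placeholder MXID
        homeserver="https://matrix.example.com",  # placeholders; with these set, a missing
        access_token="placeholder-token",         # row is inserted instead of returning None
        device_id="",
    )
    await client.start()  # checks the token, sets up crypto (if available) and starts syncing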
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -14,9 +14,10 @@
|
|||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
import random
|
import random
|
||||||
import string
|
|
||||||
import bcrypt
|
|
||||||
import re
|
import re
|
||||||
|
import string
|
||||||
|
|
||||||
|
import bcrypt
|
||||||
|
|
||||||
from mautrix.util.config import BaseFileConfig, ConfigUpdateHelper
|
from mautrix.util.config import BaseFileConfig, ConfigUpdateHelper
|
||||||
|
|
||||||
@ -31,33 +32,50 @@ class Config(BaseFileConfig):
|
|||||||
def do_update(self, helper: ConfigUpdateHelper) -> None:
|
def do_update(self, helper: ConfigUpdateHelper) -> None:
|
||||||
base = helper.base
|
base = helper.base
|
||||||
copy = helper.copy
|
copy = helper.copy
|
||||||
|
|
||||||
|
if "database" in self and self["database"].startswith("sqlite:///"):
|
||||||
|
helper.base["database"] = self["database"].replace("sqlite:///", "sqlite:")
|
||||||
|
else:
|
||||||
copy("database")
|
copy("database")
|
||||||
|
copy("database_opts")
|
||||||
|
if isinstance(self["crypto_database"], dict):
|
||||||
|
if self["crypto_database.type"] == "postgres":
|
||||||
|
base["crypto_database"] = self["crypto_database.postgres_uri"]
|
||||||
|
else:
|
||||||
|
copy("crypto_database")
|
||||||
copy("plugin_directories.upload")
|
copy("plugin_directories.upload")
|
||||||
copy("plugin_directories.load")
|
copy("plugin_directories.load")
|
||||||
copy("plugin_directories.trash")
|
copy("plugin_directories.trash")
|
||||||
copy("plugin_directories.db")
|
if "plugin_directories.db" in self:
|
||||||
|
base["plugin_databases.sqlite"] = self["plugin_directories.db"]
|
||||||
|
else:
|
||||||
|
copy("plugin_databases.sqlite")
|
||||||
|
copy("plugin_databases.postgres")
|
||||||
|
copy("plugin_databases.postgres_opts")
|
||||||
copy("server.hostname")
|
copy("server.hostname")
|
||||||
copy("server.port")
|
copy("server.port")
|
||||||
copy("server.public_url")
|
copy("server.public_url")
|
||||||
copy("server.listen")
|
copy("server.listen")
|
||||||
copy("server.base_path")
|
|
||||||
copy("server.ui_base_path")
|
copy("server.ui_base_path")
|
||||||
copy("server.plugin_base_path")
|
copy("server.plugin_base_path")
|
||||||
copy("server.override_resource_path")
|
copy("server.override_resource_path")
|
||||||
copy("server.appservice_base_path")
|
|
||||||
shared_secret = self["server.unshared_secret"]
|
shared_secret = self["server.unshared_secret"]
|
||||||
if shared_secret is None or shared_secret == "generate":
|
if shared_secret is None or shared_secret == "generate":
|
||||||
base["server.unshared_secret"] = self._new_token()
|
base["server.unshared_secret"] = self._new_token()
|
||||||
else:
|
else:
|
||||||
base["server.unshared_secret"] = shared_secret
|
base["server.unshared_secret"] = shared_secret
|
||||||
copy("registration_secrets")
|
if "registration_secrets" in self:
|
||||||
|
base["homeservers"] = self["registration_secrets"]
|
||||||
|
else:
|
||||||
|
copy("homeservers")
|
||||||
copy("admins")
|
copy("admins")
|
||||||
for username, password in base["admins"].items():
|
for username, password in base["admins"].items():
|
||||||
if password and not bcrypt_regex.match(password):
|
if password and not bcrypt_regex.match(password):
|
||||||
if password == "password":
|
if password == "password":
|
||||||
password = self._new_token()
|
password = self._new_token()
|
||||||
base["admins"][username] = bcrypt.hashpw(password.encode("utf-8"),
|
base["admins"][username] = bcrypt.hashpw(
|
||||||
bcrypt.gensalt()).decode("utf-8")
|
password.encode("utf-8"), bcrypt.gensalt()
|
||||||
|
).decode("utf-8")
|
||||||
copy("api_features.login")
|
copy("api_features.login")
|
||||||
copy("api_features.plugin")
|
copy("api_features.plugin")
|
||||||
copy("api_features.plugin_upload")
|
copy("api_features.plugin_upload")
|
||||||
|
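One detail of the config migration above that may not be obvious: plaintext admin passwords are re-hashed with bcrypt when the config is updated. A minimal illustration, with a made-up password value:

import bcrypt

# Illustrative only: this is what happens to a plaintext entry under "admins".
plaintext = "hunter2"  # hypothetical admin password from the config file
hashed = bcrypt.hashpw(plaintext.encode("utf-8"), bcrypt.gensalt()).decode("utf-8")
print(hashed)  # e.g. "$2b$12$..." - this hash is what gets written back into the config
print(bcrypt.checkpw(plaintext.encode("utf-8"), hashed.encode("utf-8")))  # True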
maubot/db.py (111 lines, deleted)
@ -1,111 +0,0 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
|
||||||
#
|
|
||||||
# This program is free software: you can redistribute it and/or modify
|
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
|
||||||
# the Free Software Foundation, either version 3 of the License, or
|
|
||||||
# (at your option) any later version.
|
|
||||||
#
|
|
||||||
# This program is distributed in the hope that it will be useful,
|
|
||||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
|
||||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
|
||||||
# GNU Affero General Public License for more details.
|
|
||||||
#
|
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
|
||||||
from typing import Iterable, Optional
|
|
||||||
import logging
|
|
||||||
import sys
|
|
||||||
|
|
||||||
from sqlalchemy import Column, String, Boolean, ForeignKey, Text
|
|
||||||
from sqlalchemy.engine.base import Engine
|
|
||||||
import sqlalchemy as sql
|
|
||||||
|
|
||||||
from mautrix.types import UserID, FilterID, SyncToken, ContentURI
|
|
||||||
from mautrix.util.db import Base
|
|
||||||
|
|
||||||
from .config import Config
|
|
||||||
|
|
||||||
|
|
||||||
class DBPlugin(Base):
|
|
||||||
__tablename__ = "plugin"
|
|
||||||
|
|
||||||
id: str = Column(String(255), primary_key=True)
|
|
||||||
type: str = Column(String(255), nullable=False)
|
|
||||||
enabled: bool = Column(Boolean, nullable=False, default=False)
|
|
||||||
primary_user: UserID = Column(String(255),
|
|
||||||
ForeignKey("client.id", onupdate="CASCADE", ondelete="RESTRICT"),
|
|
||||||
nullable=False)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def all(cls) -> Iterable['DBPlugin']:
|
|
||||||
return cls._select_all()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get(cls, id: str) -> Optional['DBPlugin']:
|
|
||||||
return cls._select_one_or_none(cls.c.id == id)
|
|
||||||
|
|
||||||
|
|
||||||
class DBPluginFile(Base):
|
|
||||||
__tablename__ = "plugin_file"
|
|
||||||
|
|
||||||
plugin_id: str = Column(String(255),
|
|
||||||
ForeignKey("plugin.id", onupdate="CASCADE", ondelete="CASCADE"),
|
|
||||||
primary_key=True)
|
|
||||||
file_name: str = Column(String(255), primary_key=True)
|
|
||||||
content: str = Column(Text, nullable=False, default="")
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def all_for_plugin(cls, id: str) -> Iterable['DBPluginFile']:
|
|
||||||
return cls._select_all(cls.c.plugin_id == id)
|
|
||||||
|
|
||||||
|
|
||||||
class DBClient(Base):
|
|
||||||
__tablename__ = "client"
|
|
||||||
|
|
||||||
id: UserID = Column(String(255), primary_key=True)
|
|
||||||
homeserver: str = Column(String(255), nullable=False)
|
|
||||||
access_token: str = Column(Text, nullable=False)
|
|
||||||
enabled: bool = Column(Boolean, nullable=False, default=False)
|
|
||||||
|
|
||||||
next_batch: SyncToken = Column(String(255), nullable=False, default="")
|
|
||||||
filter_id: FilterID = Column(String(255), nullable=False, default="")
|
|
||||||
|
|
||||||
sync: bool = Column(Boolean, nullable=False, default=True)
|
|
||||||
autojoin: bool = Column(Boolean, nullable=False, default=True)
|
|
||||||
|
|
||||||
displayname: str = Column(String(255), nullable=False, default="")
|
|
||||||
avatar_url: ContentURI = Column(String(255), nullable=False, default="")
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def all(cls) -> Iterable['DBClient']:
|
|
||||||
return cls._select_all()
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def get(cls, id: str) -> Optional['DBClient']:
|
|
||||||
return cls._select_one_or_none(cls.c.id == id)
|
|
||||||
|
|
||||||
|
|
||||||
def init(config: Config) -> Engine:
|
|
||||||
db = sql.create_engine(config["database"])
|
|
||||||
Base.metadata.bind = db
|
|
||||||
|
|
||||||
for table in (DBPlugin, DBClient):
|
|
||||||
table.bind(db)
|
|
||||||
|
|
||||||
if not db.has_table("alembic_version"):
|
|
||||||
log = logging.getLogger("maubot.db")
|
|
||||||
|
|
||||||
if db.has_table("client") and db.has_table("plugin"):
|
|
||||||
log.warning("alembic_version table not found, but client and plugin tables found. "
|
|
||||||
"Assuming pre-Alembic database and inserting version.")
|
|
||||||
db.execute("CREATE TABLE IF NOT EXISTS alembic_version ("
|
|
||||||
" version_num VARCHAR(32) PRIMARY KEY"
|
|
||||||
");")
|
|
||||||
db.execute("INSERT INTO alembic_version VALUES ('d295f8dcfa64');")
|
|
||||||
else:
|
|
||||||
log.critical("alembic_version table not found. "
|
|
||||||
"Did you forget to `alembic upgrade head`?")
|
|
||||||
sys.exit(10)
|
|
||||||
|
|
||||||
return db
|
|
maubot/db/__init__.py (new file, 13 lines)
@ -0,0 +1,13 @@
|
|||||||
|
from mautrix.util.async_db import Database
|
||||||
|
|
||||||
|
from .client import Client
|
||||||
|
from .instance import DatabaseEngine, Instance
|
||||||
|
from .upgrade import upgrade_table
|
||||||
|
|
||||||
|
|
||||||
|
def init(db: Database) -> None:
|
||||||
|
for table in (Client, Instance):
|
||||||
|
table.db = db
|
||||||
|
|
||||||
|
|
||||||
|
__all__ = ["upgrade_table", "init", "Client", "Instance", "DatabaseEngine"]
|
maubot/db/client.py (new file, 114 lines)
@ -0,0 +1,114 @@
|
|||||||
|
# maubot - A plugin-based Matrix bot system.
|
||||||
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING, ClassVar
|
||||||
|
|
||||||
|
from asyncpg import Record
|
||||||
|
from attr import dataclass
|
||||||
|
|
||||||
|
from mautrix.client import SyncStore
|
||||||
|
from mautrix.types import ContentURI, DeviceID, FilterID, SyncToken, UserID
|
||||||
|
from mautrix.util.async_db import Database
|
||||||
|
|
||||||
|
fake_db = Database.create("") if TYPE_CHECKING else None
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Client(SyncStore):
|
||||||
|
db: ClassVar[Database] = fake_db
|
||||||
|
|
||||||
|
id: UserID
|
||||||
|
homeserver: str
|
||||||
|
access_token: str
|
||||||
|
device_id: DeviceID
|
||||||
|
enabled: bool
|
||||||
|
|
||||||
|
next_batch: SyncToken
|
||||||
|
filter_id: FilterID
|
||||||
|
|
||||||
|
sync: bool
|
||||||
|
autojoin: bool
|
||||||
|
online: bool
|
||||||
|
|
||||||
|
displayname: str
|
||||||
|
avatar_url: ContentURI
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _from_row(cls, row: Record | None) -> Client | None:
|
||||||
|
if row is None:
|
||||||
|
return None
|
||||||
|
return cls(**row)
|
||||||
|
|
||||||
|
_columns = (
|
||||||
|
"id, homeserver, access_token, device_id, enabled, next_batch, filter_id, "
|
||||||
|
"sync, autojoin, online, displayname, avatar_url"
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _values(self):
|
||||||
|
return (
|
||||||
|
self.id,
|
||||||
|
self.homeserver,
|
||||||
|
self.access_token,
|
||||||
|
self.device_id,
|
||||||
|
self.enabled,
|
||||||
|
self.next_batch,
|
||||||
|
self.filter_id,
|
||||||
|
self.sync,
|
||||||
|
self.autojoin,
|
||||||
|
self.online,
|
||||||
|
self.displayname,
|
||||||
|
self.avatar_url,
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def all(cls) -> list[Client]:
|
||||||
|
rows = await cls.db.fetch(f"SELECT {cls._columns} FROM client")
|
||||||
|
return [cls._from_row(row) for row in rows]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def get(cls, id: str) -> Client | None:
|
||||||
|
q = f"SELECT {cls._columns} FROM client WHERE id=$1"
|
||||||
|
return cls._from_row(await cls.db.fetchrow(q, id))
|
||||||
|
|
||||||
|
async def insert(self) -> None:
|
||||||
|
q = """
|
||||||
|
INSERT INTO client (
|
||||||
|
id, homeserver, access_token, device_id, enabled, next_batch, filter_id,
|
||||||
|
sync, autojoin, online, displayname, avatar_url
|
||||||
|
) VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
|
||||||
|
"""
|
||||||
|
await self.db.execute(q, *self._values)
|
||||||
|
|
||||||
|
async def put_next_batch(self, next_batch: SyncToken) -> None:
|
||||||
|
await self.db.execute("UPDATE client SET next_batch=$1 WHERE id=$2", next_batch, self.id)
|
||||||
|
self.next_batch = next_batch
|
||||||
|
|
||||||
|
async def get_next_batch(self) -> SyncToken:
|
||||||
|
return self.next_batch
|
||||||
|
|
||||||
|
async def update(self) -> None:
|
||||||
|
q = """
|
||||||
|
UPDATE client SET homeserver=$2, access_token=$3, device_id=$4, enabled=$5,
|
||||||
|
next_batch=$6, filter_id=$7, sync=$8, autojoin=$9, online=$10,
|
||||||
|
displayname=$11, avatar_url=$12
|
||||||
|
WHERE id=$1
|
||||||
|
"""
|
||||||
|
await self.db.execute(q, *self._values)
|
||||||
|
|
||||||
|
async def delete(self) -> None:
|
||||||
|
await self.db.execute("DELETE FROM client WHERE id=$1", self.id)
|
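A short, hedged usage sketch of the Client row class defined above: it only calls methods shown in this file and in maubot/db/__init__.py, assumes the database has already been created and migrated elsewhere, and uses placeholder user details.

from maubot.db import Client, init

# Illustrative sketch only; "db" is assumed to be an already-migrated
# mautrix.util.async_db Database instance.
async def ensure_bot_row(db) -> Client:
    init(db)  # bind the Client/Instance dataclasses to the database
    client = await Client.get("@bot:example.com")  # placeholder user ID
    if client is None:
        client = Client(
            id="@bot:example.com", homeserver="https://matrix.example.com",  # placeholders
            access_token="placeholder-token", device_id="", enabled=True,
            next_batch="", filter_id="", sync=True, autojoin=True, online=True,
            displayname="disable", avatar_url="disable",
        )
        await client.insert()
    return client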
maubot/db/instance.py (new file, 101 lines)
@ -0,0 +1,101 @@
|
|||||||
|
# maubot - A plugin-based Matrix bot system.
|
||||||
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING, ClassVar
|
||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
from asyncpg import Record
|
||||||
|
from attr import dataclass
|
||||||
|
|
||||||
|
from mautrix.types import UserID
|
||||||
|
from mautrix.util.async_db import Database
|
||||||
|
|
||||||
|
fake_db = Database.create("") if TYPE_CHECKING else None
|
||||||
|
|
||||||
|
|
||||||
|
class DatabaseEngine(Enum):
|
||||||
|
SQLITE = "sqlite"
|
||||||
|
POSTGRES = "postgres"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class Instance:
|
||||||
|
db: ClassVar[Database] = fake_db
|
||||||
|
|
||||||
|
id: str
|
||||||
|
type: str
|
||||||
|
enabled: bool
|
||||||
|
primary_user: UserID
|
||||||
|
config_str: str
|
||||||
|
database_engine: DatabaseEngine | None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def database_engine_str(self) -> str | None:
|
||||||
|
return self.database_engine.value if self.database_engine else None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _from_row(cls, row: Record | None) -> Instance | None:
|
||||||
|
if row is None:
|
||||||
|
return None
|
||||||
|
data = {**row}
|
||||||
|
db_engine = data.pop("database_engine", None)
|
||||||
|
return cls(**data, database_engine=DatabaseEngine(db_engine) if db_engine else None)
|
||||||
|
|
||||||
|
_columns = "id, type, enabled, primary_user, config, database_engine"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def all(cls) -> list[Instance]:
|
||||||
|
q = f"SELECT {cls._columns} FROM instance"
|
||||||
|
rows = await cls.db.fetch(q)
|
||||||
|
return [cls._from_row(row) for row in rows]
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
async def get(cls, id: str) -> Instance | None:
|
||||||
|
q = f"SELECT {cls._columns} FROM instance WHERE id=$1"
|
||||||
|
return cls._from_row(await cls.db.fetchrow(q, id))
|
||||||
|
|
||||||
|
async def update_id(self, new_id: str) -> None:
|
||||||
|
await self.db.execute("UPDATE instance SET id=$1 WHERE id=$2", new_id, self.id)
|
||||||
|
self.id = new_id
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _values(self):
|
||||||
|
return (
|
||||||
|
self.id,
|
||||||
|
self.type,
|
||||||
|
self.enabled,
|
||||||
|
self.primary_user,
|
||||||
|
self.config_str,
|
||||||
|
self.database_engine_str,
|
||||||
|
)
|
||||||
|
|
||||||
|
async def insert(self) -> None:
|
||||||
|
q = (
|
||||||
|
"INSERT INTO instance (id, type, enabled, primary_user, config, database_engine) "
|
||||||
|
"VALUES ($1, $2, $3, $4, $5, $6)"
|
||||||
|
)
|
||||||
|
await self.db.execute(q, *self._values)
|
||||||
|
|
||||||
|
async def update(self) -> None:
|
||||||
|
q = """
|
||||||
|
UPDATE instance SET type=$2, enabled=$3, primary_user=$4, config=$5, database_engine=$6
|
||||||
|
WHERE id=$1
|
||||||
|
"""
|
||||||
|
await self.db.execute(q, *self._values)
|
||||||
|
|
||||||
|
async def delete(self) -> None:
|
||||||
|
await self.db.execute("DELETE FROM instance WHERE id=$1", self.id)
|
maubot/db/upgrade/__init__.py (new file, 5 lines)
@ -0,0 +1,5 @@
|
|||||||
|
from mautrix.util.async_db import UpgradeTable
|
||||||
|
|
||||||
|
upgrade_table = UpgradeTable()
|
||||||
|
|
||||||
|
from . import v01_initial_revision, v02_instance_database_engine
|
maubot/db/upgrade/v01_initial_revision.py (new file, 136 lines)
@ -0,0 +1,136 @@
|
|||||||
|
# maubot - A plugin-based Matrix bot system.
|
||||||
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
|
#
|
||||||
|
# This program is free software: you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
|
# the Free Software Foundation, either version 3 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU Affero General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from mautrix.util.async_db import Connection, Scheme
|
||||||
|
|
||||||
|
from . import upgrade_table
|
||||||
|
|
||||||
|
legacy_version_query = "SELECT version_num FROM alembic_version"
|
||||||
|
last_legacy_version = "90aa88820eab"
|
||||||
|
|
||||||
|
|
||||||
|
@upgrade_table.register(description="Initial asyncpg revision")
|
||||||
|
async def upgrade_v1(conn: Connection, scheme: Scheme) -> None:
|
||||||
|
if await conn.table_exists("alembic_version"):
|
||||||
|
await migrate_legacy_to_v1(conn, scheme)
|
||||||
|
else:
|
||||||
|
return await create_v1_tables(conn)
|
||||||
|
|
||||||
|
|
||||||
|
async def create_v1_tables(conn: Connection) -> None:
|
||||||
|
await conn.execute(
|
||||||
|
"""CREATE TABLE client (
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
homeserver TEXT NOT NULL,
|
||||||
|
access_token TEXT NOT NULL,
|
||||||
|
device_id TEXT NOT NULL,
|
||||||
|
enabled BOOLEAN NOT NULL,
|
||||||
|
|
||||||
|
next_batch TEXT NOT NULL,
|
||||||
|
filter_id TEXT NOT NULL,
|
||||||
|
|
||||||
|
sync BOOLEAN NOT NULL,
|
||||||
|
autojoin BOOLEAN NOT NULL,
|
||||||
|
online BOOLEAN NOT NULL,
|
||||||
|
|
||||||
|
displayname TEXT NOT NULL,
|
||||||
|
avatar_url TEXT NOT NULL
|
||||||
|
)"""
|
||||||
|
)
|
||||||
|
await conn.execute(
|
||||||
|
"""CREATE TABLE instance (
|
||||||
|
id TEXT PRIMARY KEY,
|
||||||
|
type TEXT NOT NULL,
|
||||||
|
enabled BOOLEAN NOT NULL,
|
||||||
|
primary_user TEXT NOT NULL,
|
||||||
|
config TEXT NOT NULL,
|
||||||
|
FOREIGN KEY (primary_user) REFERENCES client(id) ON DELETE RESTRICT ON UPDATE CASCADE
|
||||||
|
)"""
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
async def migrate_legacy_to_v1(conn: Connection, scheme: Scheme) -> None:
|
||||||
|
legacy_version = await conn.fetchval(legacy_version_query)
|
||||||
|
if legacy_version != last_legacy_version:
|
||||||
|
raise RuntimeError(
|
||||||
|
"Legacy database is not on last version. "
|
||||||
|
"Please upgrade the old database with alembic or drop it completely first."
|
||||||
|
)
|
||||||
|
await conn.execute("ALTER TABLE plugin RENAME TO instance")
|
||||||
|
await update_state_store(conn, scheme)
|
||||||
|
if scheme != Scheme.SQLITE:
|
||||||
|
await varchar_to_text(conn)
|
||||||
|
await conn.execute("DROP TABLE alembic_version")
|
||||||
|
|
||||||
|
|
||||||
|
async def update_state_store(conn: Connection, scheme: Scheme) -> None:
|
||||||
|
# The Matrix state store already has more or less the correct schema, so set the version
|
||||||
|
await conn.execute("CREATE TABLE mx_version (version INTEGER PRIMARY KEY)")
|
||||||
|
await conn.execute("INSERT INTO mx_version (version) VALUES (2)")
|
||||||
|
if scheme != Scheme.SQLITE:
|
||||||
|
# Remove old uppercase membership type and recreate it as lowercase
|
||||||
|
await conn.execute("ALTER TABLE mx_user_profile ALTER COLUMN membership TYPE TEXT")
|
||||||
|
await conn.execute("DROP TYPE IF EXISTS membership")
|
||||||
|
await conn.execute(
|
||||||
|
"CREATE TYPE membership AS ENUM ('join', 'leave', 'invite', 'ban', 'knock')"
|
||||||
|
)
|
||||||
|
await conn.execute(
|
||||||
|
"ALTER TABLE mx_user_profile ALTER COLUMN membership TYPE membership "
|
||||||
|
"USING LOWER(membership)::membership"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# Recreate table to remove CHECK constraint and lowercase everything
|
||||||
|
await conn.execute(
|
||||||
|
"""CREATE TABLE new_mx_user_profile (
|
||||||
|
room_id TEXT,
|
||||||
|
user_id TEXT,
|
||||||
|
membership TEXT NOT NULL
|
||||||
|
CHECK (membership IN ('join', 'leave', 'invite', 'ban', 'knock')),
|
||||||
|
displayname TEXT,
|
||||||
|
avatar_url TEXT,
|
||||||
|
PRIMARY KEY (room_id, user_id)
|
||||||
|
)"""
|
||||||
|
)
|
||||||
|
await conn.execute(
|
||||||
|
"""
|
||||||
|
INSERT INTO new_mx_user_profile (room_id, user_id, membership, displayname, avatar_url)
|
||||||
|
SELECT room_id, user_id, LOWER(membership), displayname, avatar_url
|
||||||
|
FROM mx_user_profile
|
||||||
|
"""
|
||||||
|
)
|
||||||
|
await conn.execute("DROP TABLE mx_user_profile")
|
||||||
|
await conn.execute("ALTER TABLE new_mx_user_profile RENAME TO mx_user_profile")
|
||||||
|
|
||||||
|
|
||||||
|
async def varchar_to_text(conn: Connection) -> None:
|
||||||
|
columns_to_adjust = {
|
||||||
|
"client": (
|
||||||
|
"id",
|
||||||
|
"homeserver",
|
||||||
|
"device_id",
|
||||||
|
"next_batch",
|
||||||
|
"filter_id",
|
||||||
|
"displayname",
|
||||||
|
"avatar_url",
|
||||||
|
),
|
||||||
|
"instance": ("id", "type", "primary_user"),
|
||||||
|
"mx_room_state": ("room_id",),
|
||||||
|
"mx_user_profile": ("room_id", "user_id", "displayname", "avatar_url"),
|
||||||
|
}
|
||||||
|
for table, columns in columns_to_adjust.items():
|
||||||
|
for column in columns:
|
||||||
|
await conn.execute(f'ALTER TABLE "{table}" ALTER COLUMN {column} TYPE TEXT')
|
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,18 +13,13 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from mautrix.client import ClientStore
-from mautrix.types import SyncToken
-
-
-class ClientStoreProxy(ClientStore):
-    def __init__(self, db_instance) -> None:
-        self.db_instance = db_instance
-
-    @property
-    def next_batch(self) -> SyncToken:
-        return self.db_instance.next_batch
-
-    @next_batch.setter
-    def next_batch(self, value: SyncToken) -> None:
-        self.db_instance.edit(next_batch=value)
+from __future__ import annotations
+
+from mautrix.util.async_db import Connection
+
+from . import upgrade_table
+
+
+@upgrade_table.register(description="Store instance database engine")
+async def upgrade_v2(conn: Connection) -> None:
+    await conn.execute("ALTER TABLE instance ADD COLUMN database_engine TEXT")
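The upgrade_v2 step above illustrates the registration pattern for schema upgrades; a later change would presumably be added the same way. A minimal sketch under that assumption (the column and description below are made up, not part of maubot):

from mautrix.util.async_db import Connection

from . import upgrade_table


@upgrade_table.register(description="Hypothetical example upgrade")
async def upgrade_v3(conn: Connection) -> None:
    # Illustrative only: this column does not exist in maubot's schema.
    await conn.execute("ALTER TABLE instance ADD COLUMN created_at TIMESTAMP")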
@@ -1,10 +1,21 @@
 # The full URI to the database. SQLite and Postgres are fully supported.
-# Other DBMSes supported by SQLAlchemy may or may not work.
 # Format examples:
-# SQLite: sqlite:///filename.db
-# Postgres: postgres://username:password@hostname/dbname
-database: sqlite:///maubot.db
+# SQLite: sqlite:filename.db
+# Postgres: postgresql://username:password@hostname/dbname
+database: sqlite:maubot.db
 
+# Separate database URL for the crypto database. "default" means use the same database as above.
+crypto_database: default
+
+# Additional arguments for asyncpg.create_pool() or sqlite3.connect()
+# https://magicstack.github.io/asyncpg/current/api/index.html#asyncpg.pool.create_pool
+# https://docs.python.org/3/library/sqlite3.html#sqlite3.connect
+# For sqlite, min_size is used as the connection thread pool size and max_size is ignored.
+database_opts:
+    min_size: 1
+    max_size: 10
+
+# Configuration for storing plugin .mbp files
 plugin_directories:
     # The directory where uploaded new plugins should be stored.
     upload: ./plugins
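As the comments above note, database_opts is forwarded to asyncpg.create_pool() (or sqlite3.connect() for SQLite). For Postgres, the example values correspond roughly to the following call; a sketch only, using the placeholder DSN from the config above:

import asyncpg


async def open_pool() -> asyncpg.Pool:
    # min_size/max_size mirror the database_opts keys shown in the config.
    return await asyncpg.create_pool(
        "postgresql://username:password@hostname/dbname",
        min_size=1,
        max_size=10,
    )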
@@ -15,8 +26,27 @@ plugin_directories:
    # The directory where old plugin versions and conflicting plugins should be moved.
    # Set to "delete" to delete files immediately.
    trash: ./trash
-    # The directory where plugin databases should be stored.
-    db: ./plugins
+
+# Configuration for storing plugin databases
+plugin_databases:
+    # The directory where SQLite plugin databases should be stored.
+    sqlite: ./plugins
+    # The connection URL for plugin databases. If null, all plugins will get SQLite databases.
+    # If set, plugins using the new asyncpg interface will get a Postgres connection instead.
+    # Plugins using the legacy SQLAlchemy interface will always get a SQLite connection.
+    #
+    # To use the same connection pool as the default database, set to "default"
+    # (the default database above must be postgres to do this).
+    #
+    # When enabled, maubot will create separate Postgres schemas in the database for each plugin.
+    # To view schemas in psql, use `\dn`. To view enter and interact with a specific schema,
+    # use `SET search_path = name` (where `name` is the name found with `\dn`) and then use normal
+    # SQL queries/psql commands.
+    postgres: null
+    # Maximum number of connections per plugin instance.
+    postgres_max_conns_per_plugin: 3
+    # Overrides for the default database_opts when using a non-"default" postgres connection string.
+    postgres_opts: {}
 
 server:
     # The IP and port to listen to.
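Since each plugin gets its own Postgres schema, the psql commands mentioned above (`\dn`, `SET search_path = name`) have plain-SQL equivalents that work from any client. A small asyncpg sketch; the schema and table names are hypothetical:

import asyncpg


async def inspect_plugin_schema(conn: asyncpg.Connection) -> None:
    # Equivalent of `\dn`: list the schemas maubot created for plugins.
    schemas = await conn.fetch("SELECT schema_name FROM information_schema.schemata")
    print([row["schema_name"] for row in schemas])
    # Point the search path at one plugin's schema, then query it normally.
    await conn.execute("SET search_path = some_plugin_schema")  # hypothetical name
    print(await conn.fetch("SELECT * FROM some_table LIMIT 5"))  # hypothetical table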
@@ -24,8 +54,6 @@ server:
     port: 29316
     # Public base URL where the server is visible.
     public_url: https://example.com
-    # The base management API path.
-    base_path: /_matrix/maubot/v1
     # The base path for the UI.
     ui_base_path: /_matrix/maubot
     # The base path for plugin endpoints. The instance ID will be appended directly.
@@ -33,19 +61,22 @@ server:
     # Override path from where to load UI resources.
     # Set to false to using pkg_resources to find the path.
     override_resource_path: false
-    # The base appservice API path. Use / for legacy appservice API and /_matrix/app/v1 for v1.
-    appservice_base_path: /_matrix/app/v1
     # The shared secret to sign API access tokens.
     # Set to "generate" to generate and save a new token at startup.
     unshared_secret: generate
 
-# Shared registration secrets to allow registering new users from the management UI
-registration_secrets:
-    example.com:
+# Known homeservers. This is required for the `mbc auth` command and also allows
+# more convenient access from the management UI. This is not required to create
+# clients in the management UI, since you can also just type the homeserver URL
+# into the box there.
+homeservers:
+    matrix.org:
         # Client-server API URL
-        url: https://example.com
+        url: https://matrix-client.matrix.org
         # registration_shared_secret from synapse config
-        secret: synapse_shared_registration_secret
+        # You can leave this empty if you don't have access to the homeserver.
+        # When this is empty, `mbc auth --register` won't work, but `mbc auth` (login) will.
+        secret: null
 
 # List of administrator users. Plaintext passwords will be bcrypted on startup. Set empty password
 # to prevent normal login. Root is a special user that can't have a password and will always exist.
@@ -90,7 +121,7 @@ logging:
     loggers:
         maubot:
             level: DEBUG
-        mautrix:
+        mau:
             level: DEBUG
         aiohttp:
             level: INFO
@@ -1 +1 @@
-from . import event, command, web
+from . import command, event, web
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,28 +13,46 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from typing import (Union, Callable, Sequence, Pattern, Awaitable, NewType, Optional, Any, List,
-                    Dict, Tuple, Set, Iterable)
+from typing import (
+    Any,
+    Awaitable,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    NewType,
+    Optional,
+    Pattern,
+    Sequence,
+    Set,
+    Tuple,
+    Union,
+)
 from abc import ABC, abstractmethod
 import asyncio
 import functools
 import inspect
 import re
 
-from mautrix.types import MessageType, EventType
+from mautrix.types import EventType, MessageType
 
 from ..matrix import MaubotMessageEvent
 from . import event
 
-PrefixType = Optional[Union[str, Callable[[], str]]]
-AliasesType = Union[List[str], Tuple[str, ...], Set[str], Callable[[str], bool]]
-CommandHandlerFunc = NewType("CommandHandlerFunc",
-                             Callable[[MaubotMessageEvent, Any], Awaitable[Any]])
-CommandHandlerDecorator = NewType("CommandHandlerDecorator",
-                                  Callable[[Union['CommandHandler', CommandHandlerFunc]],
-                                           'CommandHandler'])
-PassiveCommandHandlerDecorator = NewType("PassiveCommandHandlerDecorator",
-                                         Callable[[CommandHandlerFunc], CommandHandlerFunc])
+PrefixType = Optional[Union[str, Callable[[], str], Callable[[Any], str]]]
+AliasesType = Union[
+    List[str], Tuple[str, ...], Set[str], Callable[[str], bool], Callable[[Any, str], bool]
+]
+CommandHandlerFunc = NewType(
+    "CommandHandlerFunc", Callable[[MaubotMessageEvent, Any], Awaitable[Any]]
+)
+CommandHandlerDecorator = NewType(
+    "CommandHandlerDecorator",
+    Callable[[Union["CommandHandler", CommandHandlerFunc]], "CommandHandler"],
+)
+PassiveCommandHandlerDecorator = NewType(
+    "PassiveCommandHandlerDecorator", Callable[[CommandHandlerFunc], CommandHandlerFunc]
+)
 
 
 def _split_in_two(val: str, split_by: str) -> List[str]:
@ -51,9 +69,10 @@ class CommandHandler:
|
|||||||
self.__mb_get_name__: Callable[[Any], str] = lambda s: "noname"
|
self.__mb_get_name__: Callable[[Any], str] = lambda s: "noname"
|
||||||
self.__mb_is_command_match__: Callable[[Any, str], bool] = self.__command_match_unset
|
self.__mb_is_command_match__: Callable[[Any, str], bool] = self.__command_match_unset
|
||||||
self.__mb_require_subcommand__: bool = True
|
self.__mb_require_subcommand__: bool = True
|
||||||
|
self.__mb_must_consume_args__: bool = True
|
||||||
self.__mb_arg_fallthrough__: bool = True
|
self.__mb_arg_fallthrough__: bool = True
|
||||||
self.__mb_event_handler__: bool = True
|
self.__mb_event_handler__: bool = True
|
||||||
self.__mb_event_type__: EventType = EventType.ROOM_MESSAGE
|
self.__mb_event_types__: set[EventType] = {EventType.ROOM_MESSAGE}
|
||||||
self.__mb_msgtypes__: Iterable[MessageType] = (MessageType.TEXT,)
|
self.__mb_msgtypes__: Iterable[MessageType] = (MessageType.TEXT,)
|
||||||
self.__bound_copies__: Dict[Any, CommandHandler] = {}
|
self.__bound_copies__: Dict[Any, CommandHandler] = {}
|
||||||
self.__bound_instance__: Any = None
|
self.__bound_instance__: Any = None
|
||||||
@ -65,15 +84,27 @@ class CommandHandler:
|
|||||||
return self.__bound_copies__[instance]
|
return self.__bound_copies__[instance]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
new_ch = type(self)(self.__mb_func__)
|
new_ch = type(self)(self.__mb_func__)
|
||||||
keys = ["parent", "subcommands", "arguments", "help", "get_name", "is_command_match",
|
keys = [
|
||||||
"require_subcommand", "arg_fallthrough", "event_handler", "event_type",
|
"parent",
|
||||||
"msgtypes"]
|
"subcommands",
|
||||||
|
"arguments",
|
||||||
|
"help",
|
||||||
|
"get_name",
|
||||||
|
"is_command_match",
|
||||||
|
"require_subcommand",
|
||||||
|
"must_consume_args",
|
||||||
|
"arg_fallthrough",
|
||||||
|
"event_handler",
|
||||||
|
"event_types",
|
||||||
|
"msgtypes",
|
||||||
|
]
|
||||||
for key in keys:
|
for key in keys:
|
||||||
key = f"__mb_{key}__"
|
key = f"__mb_{key}__"
|
||||||
setattr(new_ch, key, getattr(self, key))
|
setattr(new_ch, key, getattr(self, key))
|
||||||
new_ch.__bound_instance__ = instance
|
new_ch.__bound_instance__ = instance
|
||||||
new_ch.__mb_subcommands__ = [subcmd.__get__(instance, instancetype)
|
new_ch.__mb_subcommands__ = [
|
||||||
for subcmd in self.__mb_subcommands__]
|
subcmd.__get__(instance, instancetype) for subcmd in self.__mb_subcommands__
|
||||||
|
]
|
||||||
self.__bound_copies__[instance] = new_ch
|
self.__bound_copies__[instance] = new_ch
|
||||||
return new_ch
|
return new_ch
|
||||||
|
|
||||||
@ -81,14 +112,20 @@ class CommandHandler:
|
|||||||
def __command_match_unset(self, val: str) -> bool:
|
def __command_match_unset(self, val: str) -> bool:
|
||||||
raise NotImplementedError("Hmm")
|
raise NotImplementedError("Hmm")
|
||||||
|
|
||||||
async def __call__(self, evt: MaubotMessageEvent, *, _existing_args: Dict[str, Any] = None,
|
async def __call__(
|
||||||
remaining_val: str = None) -> Any:
|
self,
|
||||||
|
evt: MaubotMessageEvent,
|
||||||
|
*,
|
||||||
|
_existing_args: Dict[str, Any] = None,
|
||||||
|
remaining_val: str = None,
|
||||||
|
) -> Any:
|
||||||
if evt.sender == evt.client.mxid or evt.content.msgtype not in self.__mb_msgtypes__:
|
if evt.sender == evt.client.mxid or evt.content.msgtype not in self.__mb_msgtypes__:
|
||||||
return
|
return
|
||||||
if remaining_val is None:
|
if remaining_val is None:
|
||||||
if not evt.content.body or evt.content.body[0] != "!":
|
if not evt.content.body or evt.content.body[0] != "!":
|
||||||
return
|
return
|
||||||
command, remaining_val = _split_in_two(evt.content.body[1:], " ")
|
command, remaining_val = _split_in_two(evt.content.body[1:], " ")
|
||||||
|
command = command.lower()
|
||||||
if not self.__mb_is_command_match__(self.__bound_instance__, command):
|
if not self.__mb_is_command_match__(self.__bound_instance__, command):
|
||||||
return
|
return
|
||||||
call_args: Dict[str, Any] = {**_existing_args} if _existing_args else {}
|
call_args: Dict[str, Any] = {**_existing_args} if _existing_args else {}
|
||||||
@ -109,26 +146,34 @@ class CommandHandler:
|
|||||||
await evt.reply(self.__mb_full_help__)
|
await evt.reply(self.__mb_full_help__)
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if self.__mb_must_consume_args__ and remaining_val.strip():
|
||||||
|
await evt.reply(self.__mb_full_help__)
|
||||||
|
return
|
||||||
|
|
||||||
if self.__bound_instance__:
|
if self.__bound_instance__:
|
||||||
return await self.__mb_func__(self.__bound_instance__, evt, **call_args)
|
return await self.__mb_func__(self.__bound_instance__, evt, **call_args)
|
||||||
return await self.__mb_func__(evt, **call_args)
|
return await self.__mb_func__(evt, **call_args)
|
||||||
|
|
||||||
async def __call_subcommand__(self, evt: MaubotMessageEvent, call_args: Dict[str, Any],
|
async def __call_subcommand__(
|
||||||
remaining_val: str) -> Tuple[bool, Any]:
|
self, evt: MaubotMessageEvent, call_args: Dict[str, Any], remaining_val: str
|
||||||
|
) -> Tuple[bool, Any]:
|
||||||
command, remaining_val = _split_in_two(remaining_val.strip(), " ")
|
command, remaining_val = _split_in_two(remaining_val.strip(), " ")
|
||||||
for subcommand in self.__mb_subcommands__:
|
for subcommand in self.__mb_subcommands__:
|
||||||
if subcommand.__mb_is_command_match__(subcommand.__bound_instance__, command):
|
if subcommand.__mb_is_command_match__(subcommand.__bound_instance__, command):
|
||||||
return True, await subcommand(evt, _existing_args=call_args,
|
return True, await subcommand(
|
||||||
remaining_val=remaining_val)
|
evt, _existing_args=call_args, remaining_val=remaining_val
|
||||||
|
)
|
||||||
return False, None
|
return False, None
|
||||||
|
|
||||||
async def __parse_args__(self, evt: MaubotMessageEvent, call_args: Dict[str, Any],
|
async def __parse_args__(
|
||||||
remaining_val: str) -> Tuple[bool, str]:
|
self, evt: MaubotMessageEvent, call_args: Dict[str, Any], remaining_val: str
|
||||||
|
) -> Tuple[bool, str]:
|
||||||
for arg in self.__mb_arguments__:
|
for arg in self.__mb_arguments__:
|
||||||
try:
|
try:
|
||||||
remaining_val, call_args[arg.name] = arg.match(remaining_val.strip(), evt=evt,
|
remaining_val, call_args[arg.name] = arg.match(
|
||||||
instance=self.__bound_instance__)
|
remaining_val.strip(), evt=evt, instance=self.__bound_instance__
|
||||||
if arg.required and not call_args[arg.name]:
|
)
|
||||||
|
if arg.required and call_args[arg.name] is None:
|
||||||
raise ValueError("Argument required")
|
raise ValueError("Argument required")
|
||||||
except ArgumentSyntaxError as e:
|
except ArgumentSyntaxError as e:
|
||||||
await evt.reply(e.message + (f"\n{self.__mb_usage__}" if e.show_usage else ""))
|
await evt.reply(e.message + (f"\n{self.__mb_usage__}" if e.show_usage else ""))
|
||||||
@ -141,13 +186,16 @@ class CommandHandler:
|
|||||||
@property
|
@property
|
||||||
def __mb_full_help__(self) -> str:
|
def __mb_full_help__(self) -> str:
|
||||||
usage = self.__mb_usage_without_subcommands__ + "\n\n"
|
usage = self.__mb_usage_without_subcommands__ + "\n\n"
|
||||||
|
if not self.__mb_require_subcommand__:
|
||||||
|
usage += f"* {self.__mb_prefix__} {self.__mb_usage_args__} - {self.__mb_help__}\n"
|
||||||
usage += "\n".join(cmd.__mb_usage_inline__ for cmd in self.__mb_subcommands__)
|
usage += "\n".join(cmd.__mb_usage_inline__ for cmd in self.__mb_subcommands__)
|
||||||
return usage
|
return usage
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def __mb_usage_args__(self) -> str:
|
def __mb_usage_args__(self) -> str:
|
||||||
arg_usage = " ".join(f"<{arg.label}>" if arg.required else f"[{arg.label}]"
|
arg_usage = " ".join(
|
||||||
for arg in self.__mb_arguments__)
|
f"<{arg.label}>" if arg.required else f"[{arg.label}]" for arg in self.__mb_arguments__
|
||||||
|
)
|
||||||
if self.__mb_subcommands__ and self.__mb_arg_fallthrough__:
|
if self.__mb_subcommands__ and self.__mb_arg_fallthrough__:
|
||||||
arg_usage += " " + self.__mb_usage_subcommand__
|
arg_usage += " " + self.__mb_usage_subcommand__
|
||||||
return arg_usage
|
return arg_usage
|
||||||
@ -163,14 +211,19 @@ class CommandHandler:
|
|||||||
@property
|
@property
|
||||||
def __mb_prefix__(self) -> str:
|
def __mb_prefix__(self) -> str:
|
||||||
if self.__mb_parent__:
|
if self.__mb_parent__:
|
||||||
return f"{self.__mb_parent__.__mb_prefix__} {self.__mb_name__}"
|
return (
|
||||||
|
f"!{self.__mb_parent__.__mb_get_name__(self.__bound_instance__)} "
|
||||||
|
f"{self.__mb_name__}"
|
||||||
|
)
|
||||||
return f"!{self.__mb_name__}"
|
return f"!{self.__mb_name__}"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def __mb_usage_inline__(self) -> str:
|
def __mb_usage_inline__(self) -> str:
|
||||||
if not self.__mb_arg_fallthrough__:
|
if not self.__mb_arg_fallthrough__:
|
||||||
return (f"* {self.__mb_name__} {self.__mb_usage_args__} - {self.__mb_help__}\n"
|
return (
|
||||||
f"* {self.__mb_name__} {self.__mb_usage_subcommand__}")
|
f"* {self.__mb_name__} {self.__mb_usage_args__} - {self.__mb_help__}\n"
|
||||||
|
f"* {self.__mb_name__} {self.__mb_usage_subcommand__}"
|
||||||
|
)
|
||||||
return f"* {self.__mb_name__} {self.__mb_usage_args__} - {self.__mb_help__}"
|
return f"* {self.__mb_name__} {self.__mb_usage_args__} - {self.__mb_help__}"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@ -180,8 +233,12 @@ class CommandHandler:
|
|||||||
@property
|
@property
|
||||||
def __mb_usage_without_subcommands__(self) -> str:
|
def __mb_usage_without_subcommands__(self) -> str:
|
||||||
if not self.__mb_arg_fallthrough__:
|
if not self.__mb_arg_fallthrough__:
|
||||||
return (f"**Usage:** {self.__mb_prefix__} {self.__mb_usage_args__}"
|
if not self.__mb_arguments__:
|
||||||
f" _OR_ {self.__mb_usage_subcommand__}")
|
return f"**Usage:** {self.__mb_prefix__} [subcommand] [...]"
|
||||||
|
return (
|
||||||
|
f"**Usage:** {self.__mb_prefix__} {self.__mb_usage_args__}"
|
||||||
|
f" _OR_ {self.__mb_prefix__} {self.__mb_usage_subcommand__}"
|
||||||
|
)
|
||||||
return f"**Usage:** {self.__mb_prefix__} {self.__mb_usage_args__}"
|
return f"**Usage:** {self.__mb_prefix__} {self.__mb_usage_args__}"
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@ -190,14 +247,25 @@ class CommandHandler:
|
|||||||
return f"{self.__mb_usage_without_subcommands__} \n{self.__mb_subcommands_list__}"
|
return f"{self.__mb_usage_without_subcommands__} \n{self.__mb_subcommands_list__}"
|
||||||
return self.__mb_usage_without_subcommands__
|
return self.__mb_usage_without_subcommands__
|
||||||
|
|
||||||
def subcommand(self, name: PrefixType = None, *, help: str = None, aliases: AliasesType = None,
|
def subcommand(
|
||||||
required_subcommand: bool = True, arg_fallthrough: bool = True,
|
self,
|
||||||
|
name: PrefixType = None,
|
||||||
|
*,
|
||||||
|
help: str = None,
|
||||||
|
aliases: AliasesType = None,
|
||||||
|
required_subcommand: bool = True,
|
||||||
|
arg_fallthrough: bool = True,
|
||||||
) -> CommandHandlerDecorator:
|
) -> CommandHandlerDecorator:
|
||||||
def decorator(func: Union[CommandHandler, CommandHandlerFunc]) -> CommandHandler:
|
def decorator(func: Union[CommandHandler, CommandHandlerFunc]) -> CommandHandler:
|
||||||
if not isinstance(func, CommandHandler):
|
if not isinstance(func, CommandHandler):
|
||||||
func = CommandHandler(func)
|
func = CommandHandler(func)
|
||||||
new(name, help=help, aliases=aliases, require_subcommand=required_subcommand,
|
new(
|
||||||
arg_fallthrough=arg_fallthrough)(func)
|
name,
|
||||||
|
help=help,
|
||||||
|
aliases=aliases,
|
||||||
|
require_subcommand=required_subcommand,
|
||||||
|
arg_fallthrough=arg_fallthrough,
|
||||||
|
)(func)
|
||||||
func.__mb_parent__ = self
|
func.__mb_parent__ = self
|
||||||
func.__mb_event_handler__ = False
|
func.__mb_event_handler__ = False
|
||||||
self.__mb_subcommands__.append(func)
|
self.__mb_subcommands__.append(func)
|
||||||
@ -206,9 +274,17 @@ class CommandHandler:
|
|||||||
return decorator
|
return decorator
|
||||||
|
|
||||||
|
|
||||||
def new(name: PrefixType = None, *, help: str = None, aliases: AliasesType = None,
|
def new(
|
||||||
event_type: EventType = EventType.ROOM_MESSAGE, msgtypes: Iterable[MessageType] = None,
|
name: PrefixType = None,
|
||||||
require_subcommand: bool = True, arg_fallthrough: bool = True) -> CommandHandlerDecorator:
|
*,
|
||||||
|
help: str = None,
|
||||||
|
aliases: AliasesType = None,
|
||||||
|
event_type: EventType = EventType.ROOM_MESSAGE,
|
||||||
|
msgtypes: Iterable[MessageType] = None,
|
||||||
|
require_subcommand: bool = True,
|
||||||
|
arg_fallthrough: bool = True,
|
||||||
|
must_consume_args: bool = True,
|
||||||
|
) -> CommandHandlerDecorator:
|
||||||
def decorator(func: Union[CommandHandler, CommandHandlerFunc]) -> CommandHandler:
|
def decorator(func: Union[CommandHandler, CommandHandlerFunc]) -> CommandHandler:
|
||||||
if not isinstance(func, CommandHandler):
|
if not isinstance(func, CommandHandler):
|
||||||
func = CommandHandler(func)
|
func = CommandHandler(func)
|
||||||
@ -222,22 +298,24 @@ def new(name: PrefixType = None, *, help: str = None, aliases: AliasesType = Non
|
|||||||
else:
|
else:
|
||||||
func.__mb_get_name__ = lambda self: name
|
func.__mb_get_name__ = lambda self: name
|
||||||
else:
|
else:
|
||||||
func.__mb_get_name__ = lambda self: func.__name__
|
func.__mb_get_name__ = lambda self: func.__mb_func__.__name__.replace("_", "-")
|
||||||
if callable(aliases):
|
if callable(aliases):
|
||||||
if len(inspect.getfullargspec(aliases).args) == 1:
|
if len(inspect.getfullargspec(aliases).args) == 1:
|
||||||
func.__mb_is_command_match__ = lambda self, val: aliases(val)
|
func.__mb_is_command_match__ = lambda self, val: aliases(val)
|
||||||
else:
|
else:
|
||||||
func.__mb_is_command_match__ = aliases
|
func.__mb_is_command_match__ = aliases
|
||||||
elif isinstance(aliases, (list, set, tuple)):
|
elif isinstance(aliases, (list, set, tuple)):
|
||||||
func.__mb_is_command_match__ = lambda self, val: (val == func.__mb_get_name__(self)
|
func.__mb_is_command_match__ = lambda self, val: (
|
||||||
or val in aliases)
|
val == func.__mb_get_name__(self) or val in aliases
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
func.__mb_is_command_match__ = lambda self, val: val == func.__mb_get_name__(self)
|
func.__mb_is_command_match__ = lambda self, val: val == func.__mb_get_name__(self)
|
||||||
# Decorators are executed last to first, so we reverse the argument list.
|
# Decorators are executed last to first, so we reverse the argument list.
|
||||||
func.__mb_arguments__.reverse()
|
func.__mb_arguments__.reverse()
|
||||||
func.__mb_require_subcommand__ = require_subcommand
|
func.__mb_require_subcommand__ = require_subcommand
|
||||||
func.__mb_arg_fallthrough__ = arg_fallthrough
|
func.__mb_arg_fallthrough__ = arg_fallthrough
|
||||||
func.__mb_event_type__ = event_type
|
func.__mb_must_consume_args__ = must_consume_args
|
||||||
|
func.__mb_event_types__ = {event_type}
|
||||||
if msgtypes:
|
if msgtypes:
|
||||||
func.__mb_msgtypes__ = msgtypes
|
func.__mb_msgtypes__ = msgtypes
|
||||||
return func
|
return func
|
||||||
@ -253,8 +331,9 @@ class ArgumentSyntaxError(ValueError):
|
|||||||
|
|
||||||
|
|
||||||
class Argument(ABC):
|
class Argument(ABC):
|
||||||
def __init__(self, name: str, label: str = None, *, required: bool = False,
|
def __init__(
|
||||||
pass_raw: bool = False) -> None:
|
self, name: str, label: str = None, *, required: bool = False, pass_raw: bool = False
|
||||||
|
) -> None:
|
||||||
self.name = name
|
self.name = name
|
||||||
self.label = label or name
|
self.label = label or name
|
||||||
self.required = required
|
self.required = required
|
||||||
@ -272,8 +351,15 @@ class Argument(ABC):
|
|||||||
|
|
||||||
|
|
||||||
class RegexArgument(Argument):
|
class RegexArgument(Argument):
|
||||||
def __init__(self, name: str, label: str = None, *, required: bool = False,
|
def __init__(
|
||||||
pass_raw: bool = False, matches: str = None) -> None:
|
self,
|
||||||
|
name: str,
|
||||||
|
label: str = None,
|
||||||
|
*,
|
||||||
|
required: bool = False,
|
||||||
|
pass_raw: bool = False,
|
||||||
|
matches: str = None,
|
||||||
|
) -> None:
|
||||||
super().__init__(name, label, required=required, pass_raw=pass_raw)
|
super().__init__(name, label, required=required, pass_raw=pass_raw)
|
||||||
matches = f"^{matches}" if self.pass_raw else f"^{matches}$"
|
matches = f"^{matches}" if self.pass_raw else f"^{matches}$"
|
||||||
self.regex = re.compile(matches)
|
self.regex = re.compile(matches)
|
||||||
@ -284,14 +370,23 @@ class RegexArgument(Argument):
|
|||||||
val = re.split(r"\s", val, 1)[0]
|
val = re.split(r"\s", val, 1)[0]
|
||||||
match = self.regex.match(val)
|
match = self.regex.match(val)
|
||||||
if match:
|
if match:
|
||||||
return (orig_val[:match.start()] + orig_val[match.end():],
|
return (
|
||||||
match.groups() or val[match.start():match.end()])
|
orig_val[: match.start()] + orig_val[match.end() :],
|
||||||
|
match.groups() or val[match.start() : match.end()],
|
||||||
|
)
|
||||||
return orig_val, None
|
return orig_val, None
|
||||||
|
|
||||||
|
|
||||||
class CustomArgument(Argument):
|
class CustomArgument(Argument):
|
||||||
def __init__(self, name: str, label: str = None, *, required: bool = False,
|
def __init__(
|
||||||
pass_raw: bool = False, matcher: Callable[[str], Any]) -> None:
|
self,
|
||||||
|
name: str,
|
||||||
|
label: str = None,
|
||||||
|
*,
|
||||||
|
required: bool = False,
|
||||||
|
pass_raw: bool = False,
|
||||||
|
matcher: Callable[[str], Any],
|
||||||
|
) -> None:
|
||||||
super().__init__(name, label, required=required, pass_raw=pass_raw)
|
super().__init__(name, label, required=required, pass_raw=pass_raw)
|
||||||
self.matcher = matcher
|
self.matcher = matcher
|
||||||
|
|
||||||
@ -301,8 +396,8 @@ class CustomArgument(Argument):
|
|||||||
orig_val = val
|
orig_val = val
|
||||||
val = re.split(r"\s", val, 1)[0]
|
val = re.split(r"\s", val, 1)[0]
|
||||||
res = self.matcher(val)
|
res = self.matcher(val)
|
||||||
if res:
|
if res is not None:
|
||||||
return orig_val[len(val):], res
|
return orig_val[len(val) :], res
|
||||||
return orig_val, None
|
return orig_val, None
|
||||||
|
|
||||||
|
|
||||||
@ -311,12 +406,18 @@ class SimpleArgument(Argument):
|
|||||||
if self.pass_raw:
|
if self.pass_raw:
|
||||||
return "", val
|
return "", val
|
||||||
res = re.split(r"\s", val, 1)[0]
|
res = re.split(r"\s", val, 1)[0]
|
||||||
return val[len(res):], res
|
return val[len(res) :], res
|
||||||
|
|
||||||
|
|
||||||
def argument(name: str, label: str = None, *, required: bool = True, matches: Optional[str] = None,
|
def argument(
|
||||||
parser: Optional[Callable[[str], Any]] = None, pass_raw: bool = False
|
name: str,
|
||||||
) -> CommandHandlerDecorator:
|
label: str = None,
|
||||||
|
*,
|
||||||
|
required: bool = True,
|
||||||
|
matches: Optional[str] = None,
|
||||||
|
parser: Optional[Callable[[str], Any]] = None,
|
||||||
|
pass_raw: bool = False,
|
||||||
|
) -> CommandHandlerDecorator:
|
||||||
if matches:
|
if matches:
|
||||||
return RegexArgument(name, label, required=required, matches=matches, pass_raw=pass_raw)
|
return RegexArgument(name, label, required=required, matches=matches, pass_raw=pass_raw)
|
||||||
elif parser:
|
elif parser:
|
||||||
@ -325,11 +426,17 @@ def argument(name: str, label: str = None, *, required: bool = True, matches: Op
|
|||||||
return SimpleArgument(name, label, required=required, pass_raw=pass_raw)
|
return SimpleArgument(name, label, required=required, pass_raw=pass_raw)
|
||||||
|
|
||||||
|
|
||||||
def passive(regex: Union[str, Pattern], *, msgtypes: Sequence[MessageType] = (MessageType.TEXT,),
|
def passive(
|
||||||
|
regex: Union[str, Pattern],
|
||||||
|
*,
|
||||||
|
msgtypes: Sequence[MessageType] = (MessageType.TEXT,),
|
||||||
field: Callable[[MaubotMessageEvent], str] = lambda evt: evt.content.body,
|
field: Callable[[MaubotMessageEvent], str] = lambda evt: evt.content.body,
|
||||||
event_type: EventType = EventType.ROOM_MESSAGE, multiple: bool = False,
|
event_type: EventType = EventType.ROOM_MESSAGE,
|
||||||
case_insensitive: bool = False, multiline: bool = False, dot_all: bool = False
|
multiple: bool = False,
|
||||||
) -> PassiveCommandHandlerDecorator:
|
case_insensitive: bool = False,
|
||||||
|
multiline: bool = False,
|
||||||
|
dot_all: bool = False,
|
||||||
|
) -> PassiveCommandHandlerDecorator:
|
||||||
if not isinstance(regex, Pattern):
|
if not isinstance(regex, Pattern):
|
||||||
flags = re.RegexFlag.UNICODE
|
flags = re.RegexFlag.UNICODE
|
||||||
if case_insensitive:
|
if case_insensitive:
|
||||||
@ -358,12 +465,14 @@ def passive(regex: Union[str, Pattern], *, msgtypes: Sequence[MessageType] = (Me
|
|||||||
return
|
return
|
||||||
data = field(evt)
|
data = field(evt)
|
||||||
if multiple:
|
if multiple:
|
||||||
val = [(data[match.pos:match.endpos], *match.groups())
|
val = [
|
||||||
for match in regex.finditer(data)]
|
(data[match.pos : match.endpos], *match.groups())
|
||||||
|
for match in regex.finditer(data)
|
||||||
|
]
|
||||||
else:
|
else:
|
||||||
match = regex.search(data)
|
match = regex.search(data)
|
||||||
if match:
|
if match:
|
||||||
val = (data[match.pos:match.endpos], *match.groups())
|
val = (data[match.pos : match.endpos], *match.groups())
|
||||||
else:
|
else:
|
||||||
val = None
|
val = None
|
||||||
if val:
|
if val:
|
||||||
|
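The command.py hunks above are mostly formatting changes plus the new must_consume_args option; the decorator API that plugins use stays the same. For reference, a minimal sketch of a command handler built on this module (the plugin class and strings are made up):

from maubot import Plugin, MessageEvent
from maubot.handlers import command


class EchoBot(Plugin):
    @command.new(name="echo", help="Repeat a message back")
    @command.argument("message", pass_raw=True, required=False)
    async def echo(self, evt: MessageEvent, message: str) -> None:
        await evt.reply(message or "Nothing to echo")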
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,23 +13,32 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from typing import Callable, Union, NewType
+from __future__ import annotations
+
+from typing import Callable, NewType
 
-from mautrix.types import EventType
 from mautrix.client import EventHandler, InternalEventType
+from mautrix.types import EventType
 
 EventHandlerDecorator = NewType("EventHandlerDecorator", Callable[[EventHandler], EventHandler])
 
 
-def on(var: Union[EventType, InternalEventType, EventHandler]
-       ) -> Union[EventHandlerDecorator, EventHandler]:
+def on(var: EventType | InternalEventType | EventHandler) -> EventHandlerDecorator | EventHandler:
     def decorator(func: EventHandler) -> EventHandler:
         func.__mb_event_handler__ = True
         if isinstance(var, (EventType, InternalEventType)):
-            func.__mb_event_type__ = var
+            if hasattr(func, "__mb_event_types__"):
+                func.__mb_event_types__.add(var)
+            else:
+                func.__mb_event_types__ = {var}
         else:
-            func.__mb_event_type__ = EventType.ALL
+            func.__mb_event_types__ = {EventType.ALL}
+
         return func
 
     return decorator if isinstance(var, (EventType, InternalEventType)) else decorator(var)
+
+
+def off(func: EventHandler) -> EventHandler:
+    func.__mb_event_handler__ = False
+    return func
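A short sketch of how a plugin uses the on/off decorators diffed above; the event type and handler body are arbitrary examples:

from mautrix.types import EventType, ReactionEvent

from maubot import Plugin
from maubot.handlers import event


class ReactionLogger(Plugin):
    @event.on(EventType.REACTION)
    async def handle_reaction(self, evt: ReactionEvent) -> None:
        # Multiple @event.on decorators can be stacked now that the handler
        # tracks a set of event types (__mb_event_types__).
        self.log.debug(f"Reaction from {evt.sender}")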
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,9 +13,9 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from typing import Callable, Any, Awaitable
+from typing import Any, Awaitable, Callable
 
-from aiohttp import web, hdrs
+from aiohttp import hdrs, web
 
 WebHandler = Callable[[web.Request], Awaitable[web.StreamResponse]]
 WebHandlerDecorator = Callable[[WebHandler], WebHandler]
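The WebHandler aliases above describe plain aiohttp request handlers. A minimal sketch of a plugin web endpoint, assuming the module exposes aiohttp-style route decorators such as web.get (the path and response are made up):

from aiohttp.web import Request, Response, json_response

from maubot import Plugin
from maubot.handlers import web


class PingWeb(Plugin):
    @web.get("/ping")
    async def ping(self, req: Request) -> Response:
        # Matches the WebHandler signature: request in, (Stream)Response out.
        return json_response({"status": "ok"})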
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -13,58 +13,92 @@
|
|||||||
#
|
#
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
from typing import Dict, List, Optional, Iterable, TYPE_CHECKING
|
from __future__ import annotations
|
||||||
from asyncio import AbstractEventLoop
|
|
||||||
import os.path
|
from typing import TYPE_CHECKING, Any, AsyncGenerator, cast
|
||||||
import logging
|
from collections import defaultdict
|
||||||
|
import asyncio
|
||||||
|
import inspect
|
||||||
import io
|
import io
|
||||||
|
import logging
|
||||||
|
import os.path
|
||||||
|
|
||||||
from ruamel.yaml.comments import CommentedMap
|
|
||||||
from ruamel.yaml import YAML
|
from ruamel.yaml import YAML
|
||||||
import sqlalchemy as sql
|
from ruamel.yaml.comments import CommentedMap
|
||||||
|
|
||||||
from mautrix.util.config import BaseProxyConfig, RecursiveDict
|
|
||||||
from mautrix.types import UserID
|
from mautrix.types import UserID
|
||||||
|
from mautrix.util import background_task
|
||||||
|
from mautrix.util.async_db import Database, Scheme, UpgradeTable
|
||||||
|
from mautrix.util.async_getter_lock import async_getter_lock
|
||||||
|
from mautrix.util.config import BaseProxyConfig, RecursiveDict
|
||||||
|
from mautrix.util.logging import TraceLogger
|
||||||
|
|
||||||
from .db import DBPlugin
|
|
||||||
from .config import Config
|
|
||||||
from .client import Client
|
from .client import Client
|
||||||
from .loader import PluginLoader, ZippedPluginLoader
|
from .db import DatabaseEngine, Instance as DBInstance
|
||||||
|
from .lib.optionalalchemy import Engine, MetaData, create_engine
|
||||||
|
from .lib.plugin_db import ProxyPostgresDatabase
|
||||||
|
from .loader import DatabaseType, PluginLoader, ZippedPluginLoader
|
||||||
from .plugin_base import Plugin
|
from .plugin_base import Plugin
|
||||||
|
|
||||||
if TYPE_CHECKING:
|
if TYPE_CHECKING:
|
||||||
from .server import MaubotServer, PluginWebApp
|
from .__main__ import Maubot
|
||||||
|
from .server import PluginWebApp
|
||||||
|
|
||||||
log = logging.getLogger("maubot.instance")
|
log: TraceLogger = cast(TraceLogger, logging.getLogger("maubot.instance"))
|
||||||
|
db_log: TraceLogger = cast(TraceLogger, logging.getLogger("maubot.instance_db"))
|
||||||
|
|
||||||
yaml = YAML()
|
yaml = YAML()
|
||||||
yaml.indent(4)
|
yaml.indent(4)
|
||||||
yaml.width = 200
|
yaml.width = 200
|
||||||
|
|
||||||
|
|
||||||
class PluginInstance:
|
class PluginInstance(DBInstance):
|
||||||
webserver: 'MaubotServer' = None
|
maubot: "Maubot" = None
|
||||||
mb_config: Config = None
|
cache: dict[str, PluginInstance] = {}
|
||||||
loop: AbstractEventLoop = None
|
plugin_directories: list[str] = []
|
||||||
cache: Dict[str, 'PluginInstance'] = {}
|
_async_get_locks: dict[Any, asyncio.Lock] = defaultdict(lambda: asyncio.Lock())
|
||||||
plugin_directories: List[str] = []
|
|
||||||
|
|
||||||
log: logging.Logger
|
log: logging.Logger
|
||||||
loader: PluginLoader
|
loader: PluginLoader | None
|
||||||
client: Client
|
client: Client | None
|
||||||
plugin: Plugin
|
plugin: Plugin | None
|
||||||
config: BaseProxyConfig
|
config: BaseProxyConfig | None
|
||||||
base_cfg: Optional[RecursiveDict[CommentedMap]]
|
base_cfg: RecursiveDict[CommentedMap] | None
|
||||||
base_cfg_str: Optional[str]
|
base_cfg_str: str | None
|
||||||
inst_db: sql.engine.Engine
|
inst_db: sql.engine.Engine | Database | None
|
||||||
inst_db_tables: Dict[str, sql.Table]
|
inst_db_tables: dict | None
|
||||||
inst_webapp: Optional['PluginWebApp']
|
inst_webapp: PluginWebApp | None
|
||||||
inst_webapp_url: Optional[str]
|
inst_webapp_url: str | None
|
||||||
started: bool
|
started: bool
|
||||||
|
|
||||||
def __init__(self, db_instance: DBPlugin):
|
def __init__(
|
||||||
self.db_instance = db_instance
|
self,
|
||||||
|
id: str,
|
||||||
|
type: str,
|
||||||
|
enabled: bool,
|
||||||
|
primary_user: UserID,
|
||||||
|
config: str = "",
|
||||||
|
database_engine: DatabaseEngine | None = None,
|
||||||
|
) -> None:
|
||||||
|
super().__init__(
|
||||||
|
id=id,
|
||||||
|
type=type,
|
||||||
|
enabled=bool(enabled),
|
||||||
|
primary_user=primary_user,
|
||||||
|
config_str=config,
|
||||||
|
database_engine=database_engine,
|
||||||
|
)
|
||||||
|
|
||||||
|
def __hash__(self) -> int:
|
||||||
|
return hash(self.id)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def init_cls(cls, maubot: "Maubot") -> None:
|
||||||
|
cls.maubot = maubot
|
||||||
|
|
||||||
|
def postinit(self) -> None:
|
||||||
self.log = log.getChild(self.id)
|
self.log = log.getChild(self.id)
|
||||||
|
self.cache[self.id] = self
|
||||||
self.config = None
|
self.config = None
|
||||||
self.started = False
|
self.started = False
|
||||||
self.loader = None
|
self.loader = None
|
||||||
@ -76,7 +110,6 @@ class PluginInstance:
|
|||||||
self.inst_webapp_url = None
|
self.inst_webapp_url = None
|
||||||
self.base_cfg = None
|
self.base_cfg = None
|
||||||
self.base_cfg_str = None
|
self.base_cfg_str = None
|
||||||
self.cache[self.id] = self
|
|
||||||
|
|
||||||
def to_dict(self) -> dict:
|
def to_dict(self) -> dict:
|
||||||
return {
|
return {
|
||||||
@ -85,44 +118,144 @@ class PluginInstance:
|
|||||||
"enabled": self.enabled,
|
"enabled": self.enabled,
|
||||||
"started": self.started,
|
"started": self.started,
|
||||||
"primary_user": self.primary_user,
|
"primary_user": self.primary_user,
|
||||||
"config": self.db_instance.config,
|
"config": self.config_str,
|
||||||
"base_config": self.base_cfg_str,
|
"base_config": self.base_cfg_str,
|
||||||
"database": (self.inst_db is not None
|
"database": (
|
||||||
and self.mb_config["api_features.instance_database"]),
|
self.inst_db is not None and self.maubot.config["api_features.instance_database"]
|
||||||
|
),
|
||||||
|
"database_interface": self.loader.meta.database_type_str if self.loader else "unknown",
|
||||||
|
"database_engine": self.database_engine_str,
|
||||||
}
|
}
|
||||||
|
|
||||||
def get_db_tables(self) -> Dict[str, sql.Table]:
|
def _introspect_sqlalchemy(self) -> dict:
|
||||||
if not self.inst_db_tables:
|
metadata = MetaData()
|
||||||
metadata = sql.MetaData()
|
|
||||||
metadata.reflect(self.inst_db)
|
metadata.reflect(self.inst_db)
|
||||||
self.inst_db_tables = metadata.tables
|
return {
|
||||||
|
table.name: {
|
||||||
|
"columns": {
|
||||||
|
column.name: {
|
||||||
|
"type": str(column.type),
|
||||||
|
"unique": column.unique or False,
|
||||||
|
"default": column.default,
|
||||||
|
"nullable": column.nullable,
|
||||||
|
"primary": column.primary_key,
|
||||||
|
}
|
||||||
|
for column in table.columns
|
||||||
|
},
|
||||||
|
}
|
||||||
|
for table in metadata.tables.values()
|
||||||
|
}
|
||||||
|
|
||||||
|
async def _introspect_sqlite(self) -> dict:
|
||||||
|
q = """
|
||||||
|
SELECT
|
||||||
|
m.name AS table_name,
|
||||||
|
p.cid AS col_id,
|
||||||
|
p.name AS column_name,
|
||||||
|
p.type AS data_type,
|
||||||
|
p.pk AS is_primary,
|
||||||
|
p.dflt_value AS column_default,
|
||||||
|
p.[notnull] AS is_nullable
|
||||||
|
FROM sqlite_master m
|
||||||
|
LEFT JOIN pragma_table_info((m.name)) p
|
||||||
|
WHERE m.type = 'table'
|
||||||
|
ORDER BY table_name, col_id
|
||||||
|
"""
|
||||||
|
data = await self.inst_db.fetch(q)
|
||||||
|
tables = defaultdict(lambda: {"columns": {}})
|
||||||
|
for column in data:
|
||||||
|
table_name = column["table_name"]
|
||||||
|
col_name = column["column_name"]
|
||||||
|
tables[table_name]["columns"][col_name] = {
|
||||||
|
"type": column["data_type"],
|
||||||
|
"nullable": bool(column["is_nullable"]),
|
||||||
|
"default": column["column_default"],
|
||||||
|
"primary": bool(column["is_primary"]),
|
||||||
|
# TODO uniqueness?
|
||||||
|
}
|
||||||
|
return tables
|
||||||
|
|
||||||
|
async def _introspect_postgres(self) -> dict:
|
||||||
|
assert isinstance(self.inst_db, ProxyPostgresDatabase)
|
||||||
|
q = """
|
||||||
|
SELECT col.table_name, col.column_name, col.data_type, col.is_nullable, col.column_default,
|
||||||
|
tc.constraint_type
|
||||||
|
FROM information_schema.columns col
|
||||||
|
LEFT JOIN information_schema.constraint_column_usage ccu
|
||||||
|
ON ccu.column_name=col.column_name
|
||||||
|
LEFT JOIN information_schema.table_constraints tc
|
||||||
|
ON col.table_name=tc.table_name
|
||||||
|
AND col.table_schema=tc.table_schema
|
||||||
|
AND ccu.constraint_name=tc.constraint_name
|
||||||
|
AND ccu.constraint_schema=tc.constraint_schema
|
||||||
|
AND tc.constraint_type IN ('PRIMARY KEY', 'UNIQUE')
|
||||||
|
WHERE col.table_schema=$1
|
||||||
|
"""
|
||||||
|
data = await self.inst_db.fetch(q, self.inst_db.schema_name)
|
||||||
|
tables = defaultdict(lambda: {"columns": {}})
|
||||||
|
for column in data:
|
||||||
|
table_name = column["table_name"]
|
||||||
|
col_name = column["column_name"]
|
||||||
|
tables[table_name]["columns"].setdefault(
|
||||||
|
col_name,
|
||||||
|
{
|
||||||
|
"type": column["data_type"],
|
||||||
|
"nullable": column["is_nullable"],
|
||||||
|
"default": column["column_default"],
|
||||||
|
"primary": False,
|
||||||
|
"unique": False,
|
||||||
|
},
|
||||||
|
)
|
||||||
|
if column["constraint_type"] == "PRIMARY KEY":
|
||||||
|
tables[table_name]["columns"][col_name]["primary"] = True
|
||||||
|
elif column["constraint_type"] == "UNIQUE":
|
||||||
|
tables[table_name]["columns"][col_name]["unique"] = True
|
||||||
|
return tables
|
||||||
|
|
||||||
|
async def get_db_tables(self) -> dict:
|
||||||
|
if self.inst_db_tables is None:
|
||||||
|
if isinstance(self.inst_db, Engine):
|
||||||
|
self.inst_db_tables = self._introspect_sqlalchemy()
|
||||||
|
elif self.inst_db.scheme == Scheme.SQLITE:
|
||||||
|
self.inst_db_tables = await self._introspect_sqlite()
|
||||||
|
else:
|
||||||
|
self.inst_db_tables = await self._introspect_postgres()
|
||||||
return self.inst_db_tables
|
return self.inst_db_tables
|
||||||
|
|
||||||
def load(self) -> bool:
|
async def load(self) -> bool:
|
||||||
if not self.loader:
|
if not self.loader:
|
||||||
try:
|
try:
|
||||||
self.loader = PluginLoader.find(self.type)
|
self.loader = PluginLoader.find(self.type)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
self.log.error(f"Failed to find loader for type {self.type}")
|
self.log.error(f"Failed to find loader for type {self.type}")
|
||||||
self.db_instance.enabled = False
|
await self.update_enabled(False)
|
||||||
return False
|
return False
|
||||||
if not self.client:
|
if not self.client:
|
||||||
self.client = Client.get(self.primary_user)
|
self.client = await Client.get(self.primary_user)
|
||||||
if not self.client:
|
if not self.client:
|
||||||
self.log.error(f"Failed to get client for user {self.primary_user}")
|
self.log.error(f"Failed to get client for user {self.primary_user}")
|
||||||
self.db_instance.enabled = False
|
await self.update_enabled(False)
|
||||||
return False
|
return False
|
||||||
if self.loader.meta.database:
|
|
||||||
db_path = os.path.join(self.mb_config["plugin_directories.db"], self.id)
|
|
||||||
self.inst_db = sql.create_engine(f"sqlite:///{db_path}.db")
|
|
||||||
if self.loader.meta.webapp:
|
if self.loader.meta.webapp:
|
||||||
self.inst_webapp, self.inst_webapp_url = self.webserver.get_instance_subapp(self.id)
|
self.enable_webapp()
|
||||||
self.log.debug("Plugin instance dependencies loaded")
|
self.log.debug("Plugin instance dependencies loaded")
|
||||||
self.loader.references.add(self)
|
self.loader.references.add(self)
|
||||||
self.client.references.add(self)
|
self.client.references.add(self)
|
||||||
return True
|
return True
|
||||||
|
|
||||||
def delete(self) -> None:
|
def enable_webapp(self) -> None:
|
||||||
|
self.inst_webapp, self.inst_webapp_url = self.maubot.server.get_instance_subapp(self.id)
|
||||||
|
|
||||||
|
def disable_webapp(self) -> None:
|
||||||
|
self.maubot.server.remove_instance_webapp(self.id)
|
||||||
|
self.inst_webapp = None
|
||||||
|
self.inst_webapp_url = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _sqlite_db_path(self) -> str:
|
||||||
|
return os.path.join(self.maubot.config["plugin_databases.sqlite"], f"{self.id}.db")
|
||||||
|
|
||||||
|
async def delete(self) -> None:
|
||||||
if self.loader is not None:
|
if self.loader is not None:
|
||||||
self.loader.references.remove(self)
|
self.loader.references.remove(self)
|
||||||
if self.client is not None:
|
if self.client is not None:
|
||||||
@ -131,22 +264,89 @@ class PluginInstance:
|
|||||||
del self.cache[self.id]
|
del self.cache[self.id]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
pass
|
pass
|
||||||
self.db_instance.delete()
|
await super().delete()
|
||||||
if self.inst_db:
|
if self.inst_db:
|
||||||
self.inst_db.dispose()
|
await self.stop_database()
|
||||||
ZippedPluginLoader.trash(
|
await self.delete_database()
|
||||||
os.path.join(self.mb_config["plugin_directories.db"], f"{self.id}.db"),
|
|
||||||
reason="deleted")
|
|
||||||
if self.inst_webapp:
|
if self.inst_webapp:
|
||||||
self.webserver.remove_instance_webapp(self.id)
|
self.disable_webapp()
|
||||||
|
|
||||||
def load_config(self) -> CommentedMap:
|
def load_config(self) -> CommentedMap:
|
||||||
return yaml.load(self.db_instance.config)
|
return yaml.load(self.config_str)
|
||||||
|
|
||||||
def save_config(self, data: RecursiveDict[CommentedMap]) -> None:
|
def save_config(self, data: RecursiveDict[CommentedMap]) -> None:
|
||||||
buf = io.StringIO()
|
buf = io.StringIO()
|
||||||
yaml.dump(data, buf)
|
yaml.dump(data, buf)
|
||||||
self.db_instance.config = buf.getvalue()
|
val = buf.getvalue()
|
||||||
|
if val != self.config_str:
|
||||||
|
self.config_str = val
|
||||||
|
self.log.debug("Creating background task to save updated config")
|
||||||
|
background_task.create(self.update())
|
||||||
|
|
||||||
|
async def start_database(
|
||||||
|
self, upgrade_table: UpgradeTable | None = None, actually_start: bool = True
|
||||||
|
) -> None:
|
||||||
|
if self.loader.meta.database_type == DatabaseType.SQLALCHEMY:
|
||||||
|
if self.database_engine is None:
|
||||||
|
await self.update_db_engine(DatabaseEngine.SQLITE)
|
||||||
|
elif self.database_engine == DatabaseEngine.POSTGRES:
|
||||||
|
raise RuntimeError(
|
||||||
|
"Instance database engine is marked as Postgres, but plugin uses legacy "
|
||||||
|
"database interface, which doesn't support postgres."
|
||||||
|
)
|
||||||
|
self.inst_db = create_engine(f"sqlite:///{self._sqlite_db_path}")
|
||||||
|
elif self.loader.meta.database_type == DatabaseType.ASYNCPG:
|
||||||
|
if self.database_engine is None:
|
||||||
|
if os.path.exists(self._sqlite_db_path) or not self.maubot.plugin_postgres_db:
|
||||||
|
await self.update_db_engine(DatabaseEngine.SQLITE)
|
||||||
|
else:
|
||||||
|
await self.update_db_engine(DatabaseEngine.POSTGRES)
|
||||||
|
instance_db_log = db_log.getChild(self.id)
|
||||||
|
if self.database_engine == DatabaseEngine.POSTGRES:
|
||||||
|
if not self.maubot.plugin_postgres_db:
|
||||||
|
raise RuntimeError(
|
||||||
|
"Instance database engine is marked as Postgres, but this maubot isn't "
|
||||||
|
"configured to support Postgres for plugin databases"
|
||||||
|
)
|
||||||
|
self.inst_db = ProxyPostgresDatabase(
|
||||||
|
pool=self.maubot.plugin_postgres_db,
|
||||||
|
instance_id=self.id,
|
||||||
|
max_conns=self.maubot.config["plugin_databases.postgres_max_conns_per_plugin"],
|
||||||
|
upgrade_table=upgrade_table,
|
||||||
|
log=instance_db_log,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.inst_db = Database.create(
|
||||||
|
f"sqlite:{self._sqlite_db_path}",
|
||||||
|
upgrade_table=upgrade_table,
|
||||||
|
log=instance_db_log,
|
||||||
|
)
|
||||||
|
if actually_start:
|
||||||
|
await self.inst_db.start()
|
||||||
|
else:
|
||||||
|
raise RuntimeError(f"Unrecognized database type {self.loader.meta.database_type}")
|
||||||
|
|
||||||
|
async def stop_database(self) -> None:
|
||||||
|
if isinstance(self.inst_db, Database):
|
||||||
|
await self.inst_db.stop()
|
||||||
|
elif isinstance(self.inst_db, Engine):
|
||||||
|
self.inst_db.dispose()
|
||||||
|
else:
|
||||||
|
raise RuntimeError(f"Unknown database type {type(self.inst_db).__name__}")
|
||||||
|
|
||||||
|
async def delete_database(self) -> None:
|
||||||
|
if self.loader.meta.database_type == DatabaseType.SQLALCHEMY:
|
||||||
|
ZippedPluginLoader.trash(self._sqlite_db_path, reason="deleted")
|
||||||
|
elif self.loader.meta.database_type == DatabaseType.ASYNCPG:
|
||||||
|
if self.inst_db is None:
|
||||||
|
await self.start_database(None, actually_start=False)
|
||||||
|
if isinstance(self.inst_db, ProxyPostgresDatabase):
|
||||||
|
await self.inst_db.delete()
|
||||||
|
else:
|
||||||
|
ZippedPluginLoader.trash(self._sqlite_db_path, reason="deleted")
|
||||||
|
else:
|
||||||
|
raise RuntimeError(f"Unrecognized database type {self.loader.meta.database_type}")
|
||||||
|
self.inst_db = None
|
||||||
|
|
||||||
     async def start(self) -> None:
         if self.started:
@@ -157,9 +357,22 @@ class PluginInstance:
             return
         if not self.client or not self.loader:
             self.log.warning("Missing plugin instance dependencies, attempting to load...")
-            if not self.load():
+            if not await self.load():
                 return
         cls = await self.loader.load()
+        if self.loader.meta.webapp and self.inst_webapp is None:
+            self.log.debug("Enabling webapp after plugin meta reload")
+            self.enable_webapp()
+        elif not self.loader.meta.webapp and self.inst_webapp is not None:
+            self.log.debug("Disabling webapp after plugin meta reload")
+            self.disable_webapp()
+        if self.loader.meta.database:
+            try:
+                await self.start_database(cls.get_db_upgrade_table())
+            except Exception:
+                self.log.exception("Failed to start instance database")
+                await self.update_enabled(False)
+                return
         config_class = cls.get_config_class()
         if config_class:
             try:
@@ -174,23 +387,35 @@ class PluginInstance:
             if self.base_cfg:
                 base_cfg_func = self.base_cfg.clone
             else:

                 def base_cfg_func() -> None:
                     return None

             self.config = config_class(self.load_config, base_cfg_func, self.save_config)
-        self.plugin = cls(client=self.client.client, loop=self.loop, http=self.client.http_client,
-                          instance_id=self.id, log=self.log, config=self.config,
-                          database=self.inst_db, webapp=self.inst_webapp,
-                          webapp_url=self.inst_webapp_url)
+        self.plugin = cls(
+            client=self.client.client,
+            loop=self.maubot.loop,
+            http=self.client.http_client,
+            instance_id=self.id,
+            log=self.log,
+            config=self.config,
+            database=self.inst_db,
+            loader=self.loader,
+            webapp=self.inst_webapp,
+            webapp_url=self.inst_webapp_url,
+        )
         try:
             await self.plugin.internal_start()
         except Exception:
             self.log.exception("Failed to start instance")
-            self.db_instance.enabled = False
+            await self.update_enabled(False)
             return
         self.started = True
         self.inst_db_tables = None
-        self.log.info(f"Started instance of {self.loader.meta.id} v{self.loader.meta.version} "
-                      f"with user {self.client.id}")
+        self.log.info(
+            f"Started instance of {self.loader.meta.id} v{self.loader.meta.version} "
+            f"with user {self.client.id}"
+        )

     async def stop(self) -> None:
         if not self.started:
@@ -203,63 +428,58 @@ class PluginInstance:
         except Exception:
             self.log.exception("Failed to stop instance")
         self.plugin = None
+        if self.inst_db:
+            try:
+                await self.stop_database()
+            except Exception:
+                self.log.exception("Failed to stop instance database")
         self.inst_db_tables = None

-    @classmethod
-    def get(cls, instance_id: str, db_instance: Optional[DBPlugin] = None
-            ) -> Optional['PluginInstance']:
-        try:
-            return cls.cache[instance_id]
-        except KeyError:
-            db_instance = db_instance or DBPlugin.get(instance_id)
-            if not db_instance:
-                return None
-            return PluginInstance(db_instance)
+    async def update_id(self, new_id: str | None) -> None:
+        if new_id is not None and new_id.lower() != self.id:
+            await super().update_id(new_id.lower())

-    @classmethod
-    def all(cls) -> Iterable['PluginInstance']:
-        return (cls.get(plugin.id, plugin) for plugin in DBPlugin.all())
-
-    def update_id(self, new_id: str) -> None:
-        if new_id is not None and new_id != self.id:
-            self.db_instance.id = new_id
-
-    def update_config(self, config: str) -> None:
-        if not config or self.db_instance.config == config:
+    async def update_config(self, config: str | None) -> None:
+        if config is None or self.config_str == config:
             return
-        self.db_instance.config = config
+        self.config_str = config
         if self.started and self.plugin is not None:
-            self.plugin.on_external_config_update()
+            res = self.plugin.on_external_config_update()
+            if inspect.isawaitable(res):
+                await res
+        await self.update()

-    async def update_primary_user(self, primary_user: UserID) -> bool:
-        if not primary_user or primary_user == self.primary_user:
+    async def update_primary_user(self, primary_user: UserID | None) -> bool:
+        if primary_user is None or primary_user == self.primary_user:
             return True
-        client = Client.get(primary_user)
+        client = await Client.get(primary_user)
         if not client:
             return False
         await self.stop()
-        self.db_instance.primary_user = client.id
+        self.primary_user = client.id
         if self.client:
             self.client.references.remove(self)
         self.client = client
         self.client.references.add(self)
+        await self.update()
         await self.start()
         self.log.debug(f"Primary user switched to {self.client.id}")
         return True

-    async def update_type(self, type: str) -> bool:
-        if not type or type == self.type:
+    async def update_type(self, type: str | None) -> bool:
+        if type is None or type == self.type:
             return True
         try:
             loader = PluginLoader.find(type)
         except KeyError:
             return False
         await self.stop()
-        self.db_instance.type = loader.meta.id
+        self.type = loader.meta.id
         if self.loader:
             self.loader.references.remove(self)
         self.loader = loader
         self.loader.references.add(self)
+        await self.update()
         await self.start()
         self.log.debug(f"Type switched to {self.loader.meta.id}")
         return True
@@ -268,38 +488,46 @@ class PluginInstance:
         if started is not None and started != self.started:
             await (self.start() if started else self.stop())

-    def update_enabled(self, enabled: bool) -> None:
+    async def update_enabled(self, enabled: bool) -> None:
         if enabled is not None and enabled != self.enabled:
-            self.db_instance.enabled = enabled
+            self.enabled = enabled
+            await self.update()

-    # region Properties
+    async def update_db_engine(self, db_engine: DatabaseEngine | None) -> None:
+        if db_engine is not None and db_engine != self.database_engine:
+            self.database_engine = db_engine
+            await self.update()

-    @property
-    def id(self) -> str:
-        return self.db_instance.id
+    @classmethod
+    @async_getter_lock
+    async def get(
+        cls, instance_id: str, *, type: str | None = None, primary_user: UserID | None = None
+    ) -> PluginInstance | None:
+        try:
+            return cls.cache[instance_id]
+        except KeyError:
+            pass

-    @id.setter
-    def id(self, value: str) -> None:
-        self.db_instance.id = value
+        instance = cast(cls, await super().get(instance_id))
+        if instance is not None:
+            instance.postinit()
+            return instance

-    @property
-    def type(self) -> str:
-        return self.db_instance.type
+        if type and primary_user:
+            instance = cls(instance_id, type=type, enabled=True, primary_user=primary_user)
+            await instance.insert()
+            instance.postinit()
+            return instance

-    @property
-    def enabled(self) -> bool:
-        return self.db_instance.enabled
+        return None

-    @property
-    def primary_user(self) -> UserID:
-        return self.db_instance.primary_user
+    @classmethod
+    async def all(cls) -> AsyncGenerator[PluginInstance, None]:
+        instances = await super().all()
+        instance: PluginInstance
-    # endregion
+        for instance in instances:
+            try:
+                yield cls.cache[instance.id]
-
-def init(config: Config, webserver: 'MaubotServer', loop: AbstractEventLoop
-         ) -> Iterable[PluginInstance]:
-    PluginInstance.mb_config = config
-    PluginInstance.loop = loop
-    PluginInstance.webserver = webserver
-    return PluginInstance.all()
+            except KeyError:
+                instance.postinit()
+                yield instance
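The refactored lifecycle above ties the plugin database to the instance lifecycle: start() calls start_database() when the plugin metadata requests a database, stop() calls stop_database(), and delete_database() removes the backing storage. A minimal sketch of how these methods chain together, assuming an already-loaded PluginInstance named `instance` (the driver coroutine is an illustration, not maubot code):

# Hypothetical driver, not part of the diff above; it only shows the call order.
async def recreate_instance_db(instance) -> None:
    await instance.stop()             # stops the plugin and its database pool
    await instance.delete_database()  # drops the Postgres schema or trashes the SQLite file
    await instance.start()            # re-runs start_database() with the plugin's upgrade table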
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,8 +13,13 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from mautrix.util.color_log import (ColorFormatter as BaseColorFormatter, PREFIX, MAU_COLOR,
-                                    MXID_COLOR, RESET)
+from mautrix.util.logging.color import (
+    MAU_COLOR,
+    MXID_COLOR,
+    PREFIX,
+    RESET,
+    ColorFormatter as BaseColorFormatter,
+)

 INST_COLOR = PREFIX + "35m"  # magenta
 LOADER_COLOR = PREFIX + "36m"  # blue
@@ -23,14 +28,22 @@ LOADER_COLOR = PREFIX + "36m"  # blue
 class ColorFormatter(BaseColorFormatter):
     def _color_name(self, module: str) -> str:
         client = "maubot.client"
-        if module.startswith(client):
-            return f"{MAU_COLOR}{client}{RESET}.{MXID_COLOR}{module[len(client) + 1:]}{RESET}"
+        if module.startswith(client + "."):
+            suffix = ""
+            if module.endswith(".crypto"):
+                suffix = f".{MAU_COLOR}crypto{RESET}"
+                module = module[: -len(".crypto")]
+            module = module[len(client) + 1 :]
+            return f"{MAU_COLOR}{client}{RESET}.{MXID_COLOR}{module}{RESET}{suffix}"
         instance = "maubot.instance"
-        if module.startswith(instance):
+        if module.startswith(instance + "."):
             return f"{MAU_COLOR}{instance}{RESET}.{INST_COLOR}{module[len(instance) + 1:]}{RESET}"
+        instance_db = "maubot.instance_db"
+        if module.startswith(instance_db + "."):
+            return f"{MAU_COLOR}{instance_db}{RESET}.{INST_COLOR}{module[len(instance_db) + 1:]}{RESET}"
         loader = "maubot.loader"
-        if module.startswith(loader):
+        if module.startswith(loader + "."):
             return f"{MAU_COLOR}{instance}{RESET}.{LOADER_COLOR}{module[len(loader) + 1:]}{RESET}"
-        if module.startswith("maubot"):
+        if module.startswith("maubot."):
             return f"{MAU_COLOR}{module}{RESET}"
         return super()._color_name(module)
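The new suffix handling keeps the trailing `.crypto` part of a client logger name in the MAU colour while the Matrix ID in the middle stays in the MXID colour. A small worked example of the same string slicing (standalone sketch with a hypothetical logger name, not the formatter itself):

# Illustration only; mirrors the slicing done in _color_name above.
module = "maubot.client.@bot:example.com.crypto"
client = "maubot.client"
suffix = ""
if module.endswith(".crypto"):
    suffix = ".crypto"
    module = module[: -len(".crypto")]
mxid = module[len(client) + 1 :]
print(client, mxid, suffix)  # -> maubot.client @bot:example.com .crypto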
maubot/lib/future_awaitable.py (new file, 9 lines)
@@ -0,0 +1,9 @@
+from typing import Any, Awaitable, Callable, Generator
+
+
+class FutureAwaitable:
+    def __init__(self, func: Callable[[], Awaitable[None]]) -> None:
+        self._func = func
+
+    def __await__(self) -> Generator[Any, None, None]:
+        return self._func().__await__()
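FutureAwaitable wraps a zero-argument coroutine factory so the coroutine is only created and awaited when the wrapper itself is awaited. A minimal usage sketch (the demo coroutines are assumptions; only the import path comes from the new file):

import asyncio
from maubot.lib.future_awaitable import FutureAwaitable

async def _save() -> None:
    print("saved")

async def main() -> None:
    pending = FutureAwaitable(_save)  # nothing runs yet
    await pending                     # _save() is created and awaited here

asyncio.run(main())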
maubot/lib/optionalalchemy.py (new file, 19 lines)
@@ -0,0 +1,19 @@
+try:
+    from sqlalchemy import MetaData, asc, create_engine, desc
+    from sqlalchemy.engine import Engine
+    from sqlalchemy.exc import IntegrityError, OperationalError
+except ImportError:
+
+    class FakeError(Exception):
+        pass
+
+    class FakeType:
+        def __init__(self, *args, **kwargs):
+            raise Exception("SQLAlchemy is not installed")
+
+    def create_engine(*args, **kwargs):
+        raise Exception("SQLAlchemy is not installed")
+
+    MetaData = Engine = FakeType
+    IntegrityError = OperationalError = FakeError
+    asc = desc = lambda a: a
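The shim lets the rest of maubot import SQLAlchemy names unconditionally and only fail when a legacy SQLAlchemy-based plugin database is actually used. A hedged sketch of how a caller might rely on that (the helper function is hypothetical):

from maubot.lib.optionalalchemy import create_engine

def make_legacy_db(path: str):
    # With SQLAlchemy installed this returns a real Engine; without it, the shim
    # raises "SQLAlchemy is not installed" here, at call time rather than import time.
    return create_engine(f"sqlite:///{path}")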
maubot/lib/plugin_db.py (new file, 100 lines)
@@ -0,0 +1,100 @@
+# maubot - A plugin-based Matrix bot system.
+# Copyright (C) 2022 Tulir Asokan
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+from __future__ import annotations
+
+from contextlib import asynccontextmanager
+import asyncio
+
+from mautrix.util.async_db import Database, PostgresDatabase, Scheme, UpgradeTable
+from mautrix.util.async_db.connection import LoggingConnection
+from mautrix.util.logging import TraceLogger
+
+remove_double_quotes = str.maketrans({'"': "_"})
+
+
+class ProxyPostgresDatabase(Database):
+    scheme = Scheme.POSTGRES
+    _underlying_pool: PostgresDatabase
+    schema_name: str
+    _quoted_schema: str
+    _default_search_path: str
+    _conn_sema: asyncio.Semaphore
+    _max_conns: int
+
+    def __init__(
+        self,
+        pool: PostgresDatabase,
+        instance_id: str,
+        max_conns: int,
+        upgrade_table: UpgradeTable | None,
+        log: TraceLogger | None = None,
+    ) -> None:
+        super().__init__(pool.url, upgrade_table=upgrade_table, log=log)
+        self._underlying_pool = pool
+        # Simple accidental SQL injection prevention.
+        # Doesn't have to be perfect, since plugin instance IDs can only be set by admins anyway.
+        self.schema_name = f"mbp_{instance_id.translate(remove_double_quotes)}"
+        self._quoted_schema = f'"{self.schema_name}"'
+        self._default_search_path = '"$user", public'
+        self._conn_sema = asyncio.BoundedSemaphore(max_conns)
+        self._max_conns = max_conns
+
+    async def start(self) -> None:
+        async with self._underlying_pool.acquire() as conn:
+            self._default_search_path = await conn.fetchval("SHOW search_path")
+            self.log.trace(f"Found default search path: {self._default_search_path}")
+            await conn.execute(f"CREATE SCHEMA IF NOT EXISTS {self._quoted_schema}")
+        await super().start()
+
+    async def stop(self) -> None:
+        for _ in range(self._max_conns):
+            try:
+                await asyncio.wait_for(self._conn_sema.acquire(), timeout=3)
+            except asyncio.TimeoutError:
+                self.log.warning(
+                    "Failed to drain plugin database connection pool, "
+                    "the plugin may be leaking database connections"
+                )
+                break
+
+    async def delete(self) -> None:
+        self.log.info(f"Deleting schema {self.schema_name} and all data in it")
+        try:
+            await self._underlying_pool.execute(
+                f"DROP SCHEMA IF EXISTS {self._quoted_schema} CASCADE"
+            )
+        except Exception:
+            self.log.warning("Failed to delete schema", exc_info=True)
+
+    @asynccontextmanager
+    async def acquire(self) -> LoggingConnection:
+        conn: LoggingConnection
+        async with self._conn_sema, self._underlying_pool.acquire() as conn:
+            await conn.execute(f"SET search_path = {self._quoted_schema}")
+            try:
+                yield conn
+            finally:
+                if not conn.wrapped.is_closed():
+                    try:
+                        await conn.execute(f"SET search_path = {self._default_search_path}")
+                    except Exception:
+                        self.log.exception("Error resetting search_path after use")
+                        await conn.wrapped.close()
+                else:
+                    self.log.debug("Connection was closed after use, not resetting search_path")
+
+
+__all__ = ["ProxyPostgresDatabase"]
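ProxyPostgresDatabase gives each plugin instance its own Postgres schema and pins search_path to that schema for every acquired connection, so plugin tables cannot collide with maubot's own tables or with other instances'. A rough usage sketch (the demo coroutine and table are assumptions, not maubot code):

# Sketch only: what the per-instance schema proxy does at each step.
async def demo(proxy: ProxyPostgresDatabase) -> None:
    await proxy.start()                  # CREATE SCHEMA IF NOT EXISTS "mbp_<instance>"
    async with proxy.acquire() as conn:  # SET search_path = "mbp_<instance>"
        await conn.execute(
            "CREATE TABLE IF NOT EXISTS counters (name TEXT PRIMARY KEY, value INT)"
        )
    await proxy.stop()                   # drains the bounded connection semaphore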
maubot/lib/state_store.py (new file, 27 lines)
@@ -0,0 +1,27 @@
+# maubot - A plugin-based Matrix bot system.
+# Copyright (C) 2022 Tulir Asokan
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+from mautrix.client.state_store.asyncpg import PgStateStore as BasePgStateStore
+
+try:
+    from mautrix.crypto import StateStore as CryptoStateStore
+
+    class PgStateStore(BasePgStateStore, CryptoStateStore):
+        pass
+
+except ImportError as e:
+    PgStateStore = BasePgStateStore
+
+__all__ = ["PgStateStore"]
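The try/except subclass is the usual optional-dependency pattern: when mautrix's crypto module is importable, PgStateStore also carries the crypto state-store API; otherwise it is just the plain asyncpg store. A quick, illustrative way to see which variant was picked at runtime (not part of the diff):

from maubot.lib.state_store import PgStateStore

# Prints the method resolution order; CryptoStateStore appears in it only when
# the crypto extra is installed.
print(PgStateStore.__mro__)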
@@ -18,26 +18,28 @@ used by the builtin import mechanism for sys.path items that are paths
 to Zip archives.
 """

-from importlib import _bootstrap_external
 from importlib import _bootstrap  # for _verbose_message
-import _imp  # for check_hash_based_pycs
+from importlib import _bootstrap_external
-import _io  # for open
 import marshal  # for loads
 import sys  # for modules
 import time  # for mktime

-__all__ = ['ZipImportError', 'zipimporter']
+import _imp  # for check_hash_based_pycs
+import _io  # for open
+
+__all__ = ["ZipImportError", "zipimporter"]


 def _unpack_uint32(data):
     """Convert 4 bytes in little-endian to an integer."""
     assert len(data) == 4
-    return int.from_bytes(data, 'little')
+    return int.from_bytes(data, "little")


 def _unpack_uint16(data):
     """Convert 2 bytes in little-endian to an integer."""
     assert len(data) == 2
-    return int.from_bytes(data, 'little')
+    return int.from_bytes(data, "little")


 path_sep = _bootstrap_external.path_sep
@@ -47,15 +49,17 @@ alt_path_sep = _bootstrap_external.path_separators[1:]
 class ZipImportError(ImportError):
     pass


 # _read_directory() cache
 _zip_directory_cache = {}

 _module_type = type(sys)

 END_CENTRAL_DIR_SIZE = 22
-STRING_END_ARCHIVE = b'PK\x05\x06'
+STRING_END_ARCHIVE = b"PK\x05\x06"
 MAX_COMMENT_LEN = (1 << 16) - 1


 class zipimporter:
     """zipimporter(archivepath) -> zipimporter object

@@ -77,9 +81,10 @@ class zipimporter:
     def __init__(self, path):
         if not isinstance(path, str):
             import os
+
             path = os.fsdecode(path)
         if not path:
-            raise ZipImportError('archive path is empty', path=path)
+            raise ZipImportError("archive path is empty", path=path)
         if alt_path_sep:
             path = path.replace(alt_path_sep, path_sep)

@@ -92,14 +97,14 @@ class zipimporter:
                 # Back up one path element.
                 dirname, basename = _bootstrap_external._path_split(path)
                 if dirname == path:
-                    raise ZipImportError('not a Zip file', path=path)
+                    raise ZipImportError("not a Zip file", path=path)
                 path = dirname
                 prefix.append(basename)
             else:
                 # it exists
                 if (st.st_mode & 0o170000) != 0o100000:  # stat.S_ISREG
                     # it's a not file
-                    raise ZipImportError('not a Zip file', path=path)
+                    raise ZipImportError("not a Zip file", path=path)
                 break

         try:
@@ -154,11 +159,10 @@ class zipimporter:
             # This is possibly a portion of a namespace
             # package. Return the string representing its path,
             # without a trailing separator.
-            return None, [f'{self.archive}{path_sep}{modpath}']
+            return None, [f"{self.archive}{path_sep}{modpath}"]

         return None, []


     # Check whether we can satisfy the import of the module named by
     # 'fullname'. Return self if we can, None if we can't.
     def find_module(self, fullname, path=None):
@@ -172,7 +176,6 @@ class zipimporter:
         """
         return self.find_loader(fullname, path)[0]


     def get_code(self, fullname):
         """get_code(fullname) -> code object.

@@ -182,7 +185,6 @@ class zipimporter:
         code, ispackage, modpath = _get_module_code(self, fullname)
         return code


     def get_data(self, pathname):
         """get_data(pathname) -> string with file data.

@@ -194,15 +196,14 @@ class zipimporter:

         key = pathname
         if pathname.startswith(self.archive + path_sep):
-            key = pathname[len(self.archive + path_sep):]
+            key = pathname[len(self.archive + path_sep) :]

         try:
             toc_entry = self._files[key]
         except KeyError:
-            raise OSError(0, '', key)
+            raise OSError(0, "", key)
         return _get_data(self.archive, toc_entry)


     # Return a string matching __file__ for the named module
     def get_filename(self, fullname):
         """get_filename(fullname) -> filename string.
@@ -214,7 +215,6 @@ class zipimporter:
         code, ispackage, modpath = _get_module_code(self, fullname)
         return modpath


     def get_source(self, fullname):
         """get_source(fullname) -> source string.

@@ -228,9 +228,9 @@ class zipimporter:

         path = _get_module_path(self, fullname)
         if mi:
-            fullpath = _bootstrap_external._path_join(path, '__init__.py')
+            fullpath = _bootstrap_external._path_join(path, "__init__.py")
         else:
-            fullpath = f'{path}.py'
+            fullpath = f"{path}.py"

         try:
             toc_entry = self._files[fullpath]
@@ -239,7 +239,6 @@ class zipimporter:
             return None
         return _get_data(self.archive, toc_entry).decode()


     # Return a bool signifying whether the module is a package or not.
     def is_package(self, fullname):
         """is_package(fullname) -> bool.
@@ -252,7 +251,6 @@ class zipimporter:
             raise ZipImportError(f"can't find module {fullname!r}", name=fullname)
         return mi


     # Load and return the module named by 'fullname'.
     def load_module(self, fullname):
         """load_module(fullname) -> module.
@@ -276,7 +274,7 @@ class zipimporter:
             fullpath = _bootstrap_external._path_join(self.archive, path)
             mod.__path__ = [fullpath]

-        if not hasattr(mod, '__builtins__'):
+        if not hasattr(mod, "__builtins__"):
             mod.__builtins__ = __builtins__
         _bootstrap_external._fix_up_module(mod.__dict__, fullname, modpath)
         exec(code, mod.__dict__)
@@ -287,11 +285,10 @@ class zipimporter:
         try:
             mod = sys.modules[fullname]
         except KeyError:
-            raise ImportError(f'Loaded module {fullname!r} not found in sys.modules')
+            raise ImportError(f"Loaded module {fullname!r} not found in sys.modules")
-        _bootstrap._verbose_message('import {} # loaded from Zip {}', fullname, modpath)
+        _bootstrap._verbose_message("import {} # loaded from Zip {}", fullname, modpath)
         return mod


     def get_resource_reader(self, fullname):
         """Return the ResourceReader for a package in a zip file.

@@ -305,11 +302,11 @@ class zipimporter:
             return None
         if not _ZipImportResourceReader._registered:
             from importlib.abc import ResourceReader
+
             ResourceReader.register(_ZipImportResourceReader)
             _ZipImportResourceReader._registered = True
         return _ZipImportResourceReader(self, fullname)


     def __repr__(self):
         return f'<zipimporter object "{self.archive}{path_sep}{self.prefix}">'

@@ -320,16 +317,18 @@ class zipimporter:
 # are swapped by initzipimport() if we run in optimized mode. Also,
 # '/' is replaced by path_sep there.
 _zip_searchorder = (
-    (path_sep + '__init__.pyc', True, True),
-    (path_sep + '__init__.py', False, True),
-    ('.pyc', True, False),
-    ('.py', False, False),
+    (path_sep + "__init__.pyc", True, True),
+    (path_sep + "__init__.py", False, True),
+    (".pyc", True, False),
+    (".py", False, False),
 )


 # Given a module name, return the potential file path in the
 # archive (without extension).
 def _get_module_path(self, fullname):
-    return self.prefix + fullname.rpartition('.')[2]
+    return self.prefix + fullname.rpartition(".")[2]


 # Does this path represent a directory?
 def _is_dir(self, path):
@@ -340,6 +339,7 @@ def _is_dir(self, path):
     # If dirpath is present in self._files, we have a directory.
     return dirpath in self._files


 # Return some information about a module.
 def _get_module_info(self, fullname):
     path = _get_module_path(self, fullname)
@@ -352,6 +352,7 @@ def _get_module_info(self, fullname):

 # implementation


 # _read_directory(archive) -> files dict (new reference)
 #
 # Given a path to a Zip archive, build a dict, mapping file names
@@ -374,7 +375,7 @@ def _get_module_info(self, fullname):
 # data_size and file_offset are 0.
 def _read_directory(archive):
     try:
-        fp = _io.open(archive, 'rb')
+        fp = _io.open(archive, "rb")
     except OSError:
         raise ZipImportError(f"can't open Zip file: {archive!r}", path=archive)

@@ -394,36 +395,33 @@ def _read_directory(archive):
             fp.seek(0, 2)
             file_size = fp.tell()
         except OSError:
-            raise ZipImportError(f"can't read Zip file: {archive!r}",
-                                 path=archive)
-        max_comment_start = max(file_size - MAX_COMMENT_LEN -
-                                END_CENTRAL_DIR_SIZE, 0)
+            raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
+        max_comment_start = max(file_size - MAX_COMMENT_LEN - END_CENTRAL_DIR_SIZE, 0)
         try:
             fp.seek(max_comment_start)
             data = fp.read()
         except OSError:
-            raise ZipImportError(f"can't read Zip file: {archive!r}",
-                                 path=archive)
+            raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
         pos = data.rfind(STRING_END_ARCHIVE)
         if pos < 0:
-            raise ZipImportError(f'not a Zip file: {archive!r}',
-                                 path=archive)
-        buffer = data[pos:pos+END_CENTRAL_DIR_SIZE]
+            raise ZipImportError(f"not a Zip file: {archive!r}", path=archive)
+        buffer = data[pos : pos + END_CENTRAL_DIR_SIZE]
         if len(buffer) != END_CENTRAL_DIR_SIZE:
-            raise ZipImportError(f"corrupt Zip file: {archive!r}",
-                                 path=archive)
+            raise ZipImportError(f"corrupt Zip file: {archive!r}", path=archive)
         header_position = file_size - len(data) + pos

         header_size = _unpack_uint32(buffer[12:16])
         header_offset = _unpack_uint32(buffer[16:20])
         if header_position < header_size:
-            raise ZipImportError(f'bad central directory size: {archive!r}', path=archive)
+            raise ZipImportError(f"bad central directory size: {archive!r}", path=archive)
         if header_position < header_offset:
-            raise ZipImportError(f'bad central directory offset: {archive!r}', path=archive)
+            raise ZipImportError(f"bad central directory offset: {archive!r}", path=archive)
         header_position -= header_size
         arc_offset = header_position - header_offset
         if arc_offset < 0:
-            raise ZipImportError(f'bad central directory size or offset: {archive!r}', path=archive)
+            raise ZipImportError(
+                f"bad central directory size or offset: {archive!r}", path=archive
+            )

         files = {}
         # Start of Central Directory
@@ -435,12 +433,12 @@ def _read_directory(archive):
         while True:
             buffer = fp.read(46)
             if len(buffer) < 4:
-                raise EOFError('EOF read where not expected')
+                raise EOFError("EOF read where not expected")
             # Start of file header
-            if buffer[:4] != b'PK\x01\x02':
+            if buffer[:4] != b"PK\x01\x02":
                 break  # Bad: Central Dir File Header
             if len(buffer) != 46:
-                raise EOFError('EOF read where not expected')
+                raise EOFError("EOF read where not expected")
             flags = _unpack_uint16(buffer[8:10])
             compress = _unpack_uint16(buffer[10:12])
             time = _unpack_uint16(buffer[12:14])
@@ -454,7 +452,7 @@ def _read_directory(archive):
             file_offset = _unpack_uint32(buffer[42:46])
             header_size = name_size + extra_size + comment_size
             if file_offset > header_offset:
-                raise ZipImportError(f'bad local header offset: {archive!r}', path=archive)
+                raise ZipImportError(f"bad local header offset: {archive!r}", path=archive)
             file_offset += arc_offset

             try:
@@ -478,18 +476,19 @@ def _read_directory(archive):
             else:
                 # Historical ZIP filename encoding
                 try:
-                    name = name.decode('ascii')
+                    name = name.decode("ascii")
                 except UnicodeDecodeError:
-                    name = name.decode('latin1').translate(cp437_table)
+                    name = name.decode("latin1").translate(cp437_table)

-            name = name.replace('/', path_sep)
+            name = name.replace("/", path_sep)
             path = _bootstrap_external._path_join(archive, name)
             t = (path, compress, data_size, file_size, file_offset, time, date, crc)
             files[name] = t
             count += 1
-    _bootstrap._verbose_message('zipimport: found {} names in {!r}', count, archive)
+    _bootstrap._verbose_message("zipimport: found {} names in {!r}", count, archive)
     return files


 # During bootstrap, we may need to load the encodings
 # package from a ZIP file. But the cp437 encoding is implemented
 # in Python in the encodings package.
@@ -498,35 +497,36 @@ def _read_directory(archive):
 # the cp437 encoding.
 cp437_table = (
     # ASCII part, 8 rows x 16 chars
-    '\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f'
-    '\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f'
-    ' !"#$%&\'()*+,-./'
-    '0123456789:;<=>?'
-    '@ABCDEFGHIJKLMNO'
-    'PQRSTUVWXYZ[\\]^_'
-    '`abcdefghijklmno'
-    'pqrstuvwxyz{|}~\x7f'
+    "\x00\x01\x02\x03\x04\x05\x06\x07\x08\t\n\x0b\x0c\r\x0e\x0f"
+    "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f"
+    " !\"#$%&'()*+,-./"
+    "0123456789:;<=>?"
+    "@ABCDEFGHIJKLMNO"
+    "PQRSTUVWXYZ[\\]^_"
+    "`abcdefghijklmno"
+    "pqrstuvwxyz{|}~\x7f"
     # non-ASCII part, 16 rows x 8 chars
-    '\xc7\xfc\xe9\xe2\xe4\xe0\xe5\xe7'
-    '\xea\xeb\xe8\xef\xee\xec\xc4\xc5'
-    '\xc9\xe6\xc6\xf4\xf6\xf2\xfb\xf9'
-    '\xff\xd6\xdc\xa2\xa3\xa5\u20a7\u0192'
-    '\xe1\xed\xf3\xfa\xf1\xd1\xaa\xba'
-    '\xbf\u2310\xac\xbd\xbc\xa1\xab\xbb'
-    '\u2591\u2592\u2593\u2502\u2524\u2561\u2562\u2556'
-    '\u2555\u2563\u2551\u2557\u255d\u255c\u255b\u2510'
-    '\u2514\u2534\u252c\u251c\u2500\u253c\u255e\u255f'
-    '\u255a\u2554\u2569\u2566\u2560\u2550\u256c\u2567'
-    '\u2568\u2564\u2565\u2559\u2558\u2552\u2553\u256b'
-    '\u256a\u2518\u250c\u2588\u2584\u258c\u2590\u2580'
-    '\u03b1\xdf\u0393\u03c0\u03a3\u03c3\xb5\u03c4'
-    '\u03a6\u0398\u03a9\u03b4\u221e\u03c6\u03b5\u2229'
-    '\u2261\xb1\u2265\u2264\u2320\u2321\xf7\u2248'
-    '\xb0\u2219\xb7\u221a\u207f\xb2\u25a0\xa0'
+    "\xc7\xfc\xe9\xe2\xe4\xe0\xe5\xe7"
+    "\xea\xeb\xe8\xef\xee\xec\xc4\xc5"
+    "\xc9\xe6\xc6\xf4\xf6\xf2\xfb\xf9"
+    "\xff\xd6\xdc\xa2\xa3\xa5\u20a7\u0192"
+    "\xe1\xed\xf3\xfa\xf1\xd1\xaa\xba"
+    "\xbf\u2310\xac\xbd\xbc\xa1\xab\xbb"
+    "\u2591\u2592\u2593\u2502\u2524\u2561\u2562\u2556"
+    "\u2555\u2563\u2551\u2557\u255d\u255c\u255b\u2510"
+    "\u2514\u2534\u252c\u251c\u2500\u253c\u255e\u255f"
+    "\u255a\u2554\u2569\u2566\u2560\u2550\u256c\u2567"
+    "\u2568\u2564\u2565\u2559\u2558\u2552\u2553\u256b"
+    "\u256a\u2518\u250c\u2588\u2584\u258c\u2590\u2580"
+    "\u03b1\xdf\u0393\u03c0\u03a3\u03c3\xb5\u03c4"
+    "\u03a6\u0398\u03a9\u03b4\u221e\u03c6\u03b5\u2229"
+    "\u2261\xb1\u2265\u2264\u2320\u2321\xf7\u2248"
+    "\xb0\u2219\xb7\u221a\u207f\xb2\u25a0\xa0"
 )

 _importing_zlib = False


 # Return the zlib.decompress function object, or NULL if zlib couldn't
 # be imported. The function is cached when found, so subsequent calls
 # don't import zlib again.
@@ -535,28 +535,29 @@ def _get_decompress_func():
     if _importing_zlib:
         # Someone has a zlib.py[co] in their Zip file
         # let's avoid a stack overflow.
-        _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')
+        _bootstrap._verbose_message("zipimport: zlib UNAVAILABLE")
         raise ZipImportError("can't decompress data; zlib not available")

     _importing_zlib = True
     try:
         from zlib import decompress
     except Exception:
-        _bootstrap._verbose_message('zipimport: zlib UNAVAILABLE')
+        _bootstrap._verbose_message("zipimport: zlib UNAVAILABLE")
         raise ZipImportError("can't decompress data; zlib not available")
     finally:
         _importing_zlib = False

-    _bootstrap._verbose_message('zipimport: zlib available')
+    _bootstrap._verbose_message("zipimport: zlib available")
     return decompress


 # Given a path to a Zip file and a toc_entry, return the (uncompressed) data.
 def _get_data(archive, toc_entry):
     datapath, compress, data_size, file_size, file_offset, time, date, crc = toc_entry
     if data_size < 0:
-        raise ZipImportError('negative data size')
+        raise ZipImportError("negative data size")

-    with _io.open(archive, 'rb') as fp:
+    with _io.open(archive, "rb") as fp:
         # Check to make sure the local file header is correct
         try:
             fp.seek(file_offset)
@@ -564,11 +565,11 @@ def _get_data(archive, toc_entry):
             raise ZipImportError(f"can't read Zip file: {archive!r}", path=archive)
         buffer = fp.read(30)
         if len(buffer) != 30:
-            raise EOFError('EOF read where not expected')
+            raise EOFError("EOF read where not expected")

-        if buffer[:4] != b'PK\x03\x04':
+        if buffer[:4] != b"PK\x03\x04":
             # Bad: Local File Header
-            raise ZipImportError(f'bad local file header: {archive!r}', path=archive)
+            raise ZipImportError(f"bad local file header: {archive!r}", path=archive)

         name_size = _unpack_uint16(buffer[26:28])
         extra_size = _unpack_uint16(buffer[28:30])
@@ -601,16 +602,17 @@ def _eq_mtime(t1, t2):
     # dostime only stores even seconds, so be lenient
     return abs(t1 - t2) <= 1


 # Given the contents of a .py[co] file, unmarshal the data
 # and return the code object. Return None if it the magic word doesn't
 # match (we do this instead of raising an exception as we fall back
 # to .py if available and we don't want to mask other errors).
 def _unmarshal_code(pathname, data, mtime):
     if len(data) < 16:
-        raise ZipImportError('bad pyc data')
+        raise ZipImportError("bad pyc data")

     if data[:4] != _bootstrap_external.MAGIC_NUMBER:
-        _bootstrap._verbose_message('{!r} has bad magic', pathname)
+        _bootstrap._verbose_message("{!r} has bad magic", pathname)
         return None  # signal caller to try alternative

     flags = _unpack_uint32(data[4:8])
@@ -619,47 +621,57 @@ def _unmarshal_code(pathname, data, mtime):
         # pycs. We could validate hash-based pycs against the source, but it
         # seems likely that most people putting hash-based pycs in a zipfile
         # will use unchecked ones.
-        if (_imp.check_hash_based_pycs != 'never' and
-                (flags != 0x1 or _imp.check_hash_based_pycs == 'always')):
+        if _imp.check_hash_based_pycs != "never" and (
+            flags != 0x1 or _imp.check_hash_based_pycs == "always"
+        ):
             return None
     elif mtime != 0 and not _eq_mtime(_unpack_uint32(data[8:12]), mtime):
-        _bootstrap._verbose_message('{!r} has bad mtime', pathname)
+        _bootstrap._verbose_message("{!r} has bad mtime", pathname)
         return None  # signal caller to try alternative

     # XXX the pyc's size field is ignored; timestamp collisions are probably
     # unimportant with zip files.
     code = marshal.loads(data[16:])
     if not isinstance(code, _code_type):
-        raise TypeError(f'compiled module {pathname!r} is not a code object')
+        raise TypeError(f"compiled module {pathname!r} is not a code object")
     return code


 _code_type = type(_unmarshal_code.__code__)


 # Replace any occurrences of '\r\n?' in the input string with '\n'.
 # This converts DOS and Mac line endings to Unix line endings.
 def _normalize_line_endings(source):
-    source = source.replace(b'\r\n', b'\n')
-    source = source.replace(b'\r', b'\n')
+    source = source.replace(b"\r\n", b"\n")
+    source = source.replace(b"\r", b"\n")
     return source


 # Given a string buffer containing Python source code, compile it
 # and return a code object.
 def _compile_source(pathname, source):
     source = _normalize_line_endings(source)
-    return compile(source, pathname, 'exec', dont_inherit=True)
+    return compile(source, pathname, "exec", dont_inherit=True)


 # Convert the date/time values found in the Zip archive to a value
 # that's compatible with the time stamp stored in .pyc files.
 def _parse_dostime(d, t):
-    return time.mktime((
+    return time.mktime(
+        (
         (d >> 9) + 1980,  # bits 9..15: year
         (d >> 5) & 0xF,  # bits 5..8: month
         d & 0x1F,  # bits 0..4: day
         t >> 11,  # bits 11..15: hours
         (t >> 5) & 0x3F,  # bits 8..10: minutes
         (t & 0x1F) * 2,  # bits 0..7: seconds / 2
-        -1, -1, -1))
+            -1,
+            -1,
+            -1,
+        )
+    )


 # Given a path to a .pyc file in the archive, return the
 # modification time of the matching .py file, or 0 if no source
@@ -667,7 +679,7 @@ def _parse_dostime(d, t):
 def _get_mtime_of_source(self, path):
     try:
         # strip 'c' or 'o' from *.py[co]
-        assert path[-1:] in ('c', 'o')
+        assert path[-1:] in ("c", "o")
         path = path[:-1]
         toc_entry = self._files[path]
         # fetch the time stamp of the .py file for comparison
@@ -678,13 +690,14 @@ def _get_mtime_of_source(self, path):
     except (KeyError, IndexError, TypeError):
         return 0


 # Get the code object associated with the module specified by
 # 'fullname'.
 def _get_module_code(self, fullname):
     path = _get_module_path(self, fullname)
     for suffix, isbytecode, ispackage in _zip_searchorder:
         fullpath = path + suffix
-        _bootstrap._verbose_message('trying {}{}{}', self.archive, path_sep, fullpath, verbosity=2)
+        _bootstrap._verbose_message("trying {}{}{}", self.archive, path_sep, fullpath, verbosity=2)
         try:
             toc_entry = self._files[fullpath]
         except KeyError:
@@ -713,6 +726,7 @@ class _ZipImportResourceReader:
     This class is allowed to reference all the innards and private parts of
     the zipimporter.
     """

     _registered = False

     def __init__(self, zipimporter, fullname):
@@ -720,9 +734,10 @@ class _ZipImportResourceReader:
         self.fullname = fullname

     def open_resource(self, resource):
-        fullname_as_path = self.fullname.replace('.', '/')
-        path = f'{fullname_as_path}/{resource}'
+        fullname_as_path = self.fullname.replace(".", "/")
+        path = f"{fullname_as_path}/{resource}"
         from io import BytesIO
+
         try:
             return BytesIO(self.zipimporter.get_data(path))
         except OSError:
@@ -737,8 +752,8 @@ class _ZipImportResourceReader:
     def is_resource(self, name):
         # Maybe we could do better, but if we can get the data, it's a
         # resource. Otherwise it isn't.
-        fullname_as_path = self.fullname.replace('.', '/')
-        path = f'{fullname_as_path}/{name}'
+        fullname_as_path = self.fullname.replace(".", "/")
+        path = f"{fullname_as_path}/{name}"
         try:
             self.zipimporter.get_data(path)
         except OSError:
@@ -754,11 +769,12 @@ class _ZipImportResourceReader:
         # top of the archive, and then we iterate through _files looking for
         # names inside that "directory".
         from pathlib import Path

         fullname_path = Path(self.zipimporter.get_filename(self.fullname))
         relative_path = fullname_path.relative_to(self.zipimporter.archive)
         # Don't forget that fullname names a package, so its path will include
         # __init__.py, which we want to ignore.
-        assert relative_path.name == '__init__.py'
+        assert relative_path.name == "__init__.py"
         package_path = relative_path.parent
         subdirs_seen = set()
         for filename in self.zipimporter._files:
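The _parse_dostime helper above unpacks the packed DOS date (bits 9-15: year offset from 1980, bits 5-8: month, bits 0-4: day) and time (bits 11-15: hours, bits 5-10: minutes, bits 0-4: seconds/2) fields. A worked example of the same bit arithmetic (not part of the vendored file):

# Pack and unpack 2022-01-15 12:30:42 the way the ZIP central directory stores it.
d = (42 << 9) | (1 << 5) | 15            # year-1980=42, month=1, day=15
t = (12 << 11) | (30 << 5) | (42 // 2)   # hours=12, minutes=30, seconds/2=21
print((d >> 9) + 1980, (d >> 5) & 0xF, d & 0x1F)   # -> 2022 1 15
print(t >> 11, (t >> 5) & 0x3F, (t & 0x1F) * 2)    # -> 12 30 42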
@@ -1,2 +1,3 @@
-from .abc import PluginLoader, PluginClass, IDConflictError, PluginMeta
-from .zip import ZippedPluginLoader, MaubotZipImportError
+from .abc import BasePluginLoader, IDConflictError, PluginClass, PluginLoader
+from .meta import DatabaseType, PluginMeta
+from .zip import MaubotZipImportError, ZippedPluginLoader
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,17 +13,14 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from typing import TypeVar, Type, Dict, Set, List, TYPE_CHECKING
+from __future__ import annotations

+from typing import TYPE_CHECKING, TypeVar
 from abc import ABC, abstractmethod
 import asyncio

-from attr import dataclass
-from packaging.version import Version, InvalidVersion
-from mautrix.client.api.types.util import (SerializableAttrs, SerializerError, serializer,
-                                           deserializer)
-
-from ..__meta__ import __version__
 from ..plugin_base import Plugin
+from .meta import PluginMeta

 if TYPE_CHECKING:
     from ..instance import PluginInstance
@@ -35,47 +32,40 @@ class IDConflictError(Exception):
     pass


-@serializer(Version)
-def serialize_version(version: Version) -> str:
-    return str(version)
+class BasePluginLoader(ABC):
+    meta: PluginMeta
+
+    @property
+    @abstractmethod
+    def source(self) -> str:
+        pass
+
+    def sync_read_file(self, path: str) -> bytes:
+        raise NotImplementedError("This loader doesn't support synchronous operations")
+
+    @abstractmethod
+    async def read_file(self, path: str) -> bytes:
+        pass
+
+    def sync_list_files(self, directory: str) -> list[str]:
+        raise NotImplementedError("This loader doesn't support synchronous operations")
+
+    @abstractmethod
+    async def list_files(self, directory: str) -> list[str]:
+        pass


-@deserializer(Version)
-def deserialize_version(version: str) -> Version:
-    try:
-        return Version(version)
-    except InvalidVersion as e:
-        raise SerializerError("Invalid version") from e
-
-
-@dataclass
-class PluginMeta(SerializableAttrs['PluginMeta']):
-    id: str
-    version: Version
-    modules: List[str]
-    main_class: str
-
-    maubot: Version = Version(__version__)
-    database: bool = False
-    config: bool = False
-    webapp: bool = False
-    license: str = ""
-    extra_files: List[str] = []
-    dependencies: List[str] = []
-    soft_dependencies: List[str] = []
-
-
-class PluginLoader(ABC):
-    id_cache: Dict[str, 'PluginLoader'] = {}
+class PluginLoader(BasePluginLoader, ABC):
+    id_cache: dict[str, PluginLoader] = {}

     meta: PluginMeta
-    references: Set['PluginInstance']
+    references: set[PluginInstance]

     def __init__(self):
         self.references = set()

     @classmethod
-    def find(cls, plugin_id: str) -> 'PluginLoader':
+    def find(cls, plugin_id: str) -> PluginLoader:
         return cls.id_cache[plugin_id]

     def to_dict(self) -> dict:
@@ -85,33 +75,22 @@ class PluginLoader(ABC):
             "instances": [instance.to_dict() for instance in self.references],
         }

-    @property
-    @abstractmethod
-    def source(self) -> str:
-        pass
-
-    @abstractmethod
-    async def read_file(self, path: str) -> bytes:
-        pass
-
     async def stop_instances(self) -> None:
-        await asyncio.gather(*[instance.stop() for instance
-                               in self.references if instance.started])
+        await asyncio.gather(
+            *[instance.stop() for instance in self.references if instance.started]
+        )

     async def start_instances(self) -> None:
-        await asyncio.gather(*[instance.start() for instance
-                               in self.references if instance.enabled])
+        await asyncio.gather(
+            *[instance.start() for instance in self.references if instance.enabled]
+        )

     @abstractmethod
-    async def load(self) -> Type[PluginClass]:
+    async def load(self) -> type[PluginClass]:
         pass

     @abstractmethod
-    async def reload(self) -> Type[PluginClass]:
-        pass
-
-    @abstractmethod
-    async def unload(self) -> None:
+    async def reload(self) -> type[PluginClass]:
         pass

     @abstractmethod
maubot/loader/meta.py (new file, 69 lines)
@@ -0,0 +1,69 @@
+# maubot - A plugin-based Matrix bot system.
+# Copyright (C) 2022 Tulir Asokan
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU Affero General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU Affero General Public License for more details.
+#
+# You should have received a copy of the GNU Affero General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+from typing import List, Optional
+
+from attr import dataclass
+from packaging.version import InvalidVersion, Version
+
+from mautrix.types import (
+    ExtensibleEnum,
+    SerializableAttrs,
+    SerializerError,
+    deserializer,
+    serializer,
+)
+
+from ..__meta__ import __version__
+
+
+@serializer(Version)
+def serialize_version(version: Version) -> str:
+    return str(version)
+
+
+@deserializer(Version)
+def deserialize_version(version: str) -> Version:
+    try:
+        return Version(version)
+    except InvalidVersion as e:
+        raise SerializerError("Invalid version") from e
+
+
+class DatabaseType(ExtensibleEnum):
+    SQLALCHEMY = "sqlalchemy"
+    ASYNCPG = "asyncpg"
+
+
+@dataclass
+class PluginMeta(SerializableAttrs):
+    id: str
+    version: Version
+    modules: List[str]
+    main_class: str
+
+    maubot: Version = Version(__version__)
+    database: bool = False
+    database_type: DatabaseType = DatabaseType.SQLALCHEMY
+    config: bool = False
+    webapp: bool = False
+    license: str = ""
+    extra_files: List[str] = []
+    dependencies: List[str] = []
+    soft_dependencies: List[str] = []
+
+    @property
+    def database_type_str(self) -> Optional[str]:
+        return self.database_type.value if self.database else None
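The PluginMeta dataclass added above is what maubot builds from a plugin archive's maubot.yaml (the zip loader further down calls PluginMeta.deserialize on the parsed YAML). A rough usage sketch follows; the plugin id, version and module names are invented for illustration and are not part of this change.

# Hypothetical sketch: parse a maubot.yaml-style document into PluginMeta.
from ruamel.yaml import YAML

from maubot.loader.meta import PluginMeta

yaml = YAML()
meta_dict = yaml.load(
    "id: com.example.echobot\n"
    "version: 1.0.0\n"
    "modules: [echobot]\n"
    "main_class: EchoBot\n"
    "database: true\n"
    "database_type: asyncpg\n"
)
meta = PluginMeta.deserialize(meta_dict)
# version is parsed into a packaging Version via the @deserializer hook above,
# and database_type_str resolves to "asyncpg" because database is true.
print(meta.id, meta.version, meta.database_type_str)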
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,22 +13,27 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from typing import Dict, List, Type, Tuple, Optional
-from zipfile import ZipFile, BadZipFile
+from __future__ import annotations
+
 from time import time
+from zipfile import BadZipFile, ZipFile
 import logging
-import sys
 import os
+import sys

-from ruamel.yaml import YAML, YAMLError
 from packaging.version import Version
-from mautrix.client.api.types.util import SerializerError
+from ruamel.yaml import YAML, YAMLError

-from ..lib.zipimport import zipimporter, ZipImportError
-from ..plugin_base import Plugin
+from mautrix.types import SerializerError
+
+from ..__meta__ import __version__
 from ..config import Config
-from .abc import PluginLoader, PluginClass, PluginMeta, IDConflictError
+from ..lib.zipimport import ZipImportError, zipimporter
+from ..plugin_base import Plugin
+from .abc import IDConflictError, PluginClass, PluginLoader
+from .meta import DatabaseType, PluginMeta

+current_version = Version(__version__)
 yaml = YAML()


@@ -49,23 +54,25 @@ class MaubotZipLoadError(MaubotZipImportError):


 class ZippedPluginLoader(PluginLoader):
-    path_cache: Dict[str, 'ZippedPluginLoader'] = {}
+    path_cache: dict[str, ZippedPluginLoader] = {}
     log: logging.Logger = logging.getLogger("maubot.loader.zip")
     trash_path: str = "delete"
-    directories: List[str] = []
+    directories: list[str] = []

-    path: str
-    meta: PluginMeta
-    main_class: str
-    main_module: str
-    _loaded: Type[PluginClass]
-    _importer: zipimporter
-    _file: ZipFile
+    path: str | None
+    meta: PluginMeta | None
+    main_class: str | None
+    main_module: str | None
+    _loaded: type[PluginClass] | None
+    _importer: zipimporter | None
+    _file: ZipFile | None

     def __init__(self, path: str) -> None:
         super().__init__()
         self.path = path
         self.meta = None
+        self.main_class = None
+        self.main_module = None
         self._loaded = None
         self._importer = None
         self._file = None
@@ -74,7 +81,8 @@ class ZippedPluginLoader(PluginLoader):
         try:
             existing = self.id_cache[self.meta.id]
             raise IDConflictError(
-                f"Plugin with id {self.meta.id} already loaded from {existing.source}")
+                f"Plugin with id {self.meta.id} already loaded from {existing.source}"
+            )
         except KeyError:
             pass
         self.path_cache[self.path] = self
@@ -82,13 +90,10 @@ class ZippedPluginLoader(PluginLoader):
         self.log.debug(f"Preloaded plugin {self.meta.id} from {self.path}")

     def to_dict(self) -> dict:
-        return {
-            **super().to_dict(),
-            "path": self.path
-        }
+        return {**super().to_dict(), "path": self.path}

     @classmethod
-    def get(cls, path: str) -> 'ZippedPluginLoader':
+    def get(cls, path: str) -> ZippedPluginLoader:
         path = os.path.abspath(path)
         try:
             return cls.path_cache[path]
@@ -100,16 +105,32 @@ class ZippedPluginLoader(PluginLoader):
         return self.path

     def __repr__(self) -> str:
-        return ("<ZippedPlugin "
-                f"path='{self.path}' "
-                f"meta={self.meta} "
-                f"loaded={self._loaded is not None}>")
+        return (
+            "<ZippedPlugin "
+            f"path='{self.path}' "
+            f"meta={self.meta} "
+            f"loaded={self._loaded is not None}>"
+        )

-    async def read_file(self, path: str) -> bytes:
+    def sync_read_file(self, path: str) -> bytes:
         return self._file.read(path)

+    async def read_file(self, path: str) -> bytes:
+        return self.sync_read_file(path)
+
+    def sync_list_files(self, directory: str) -> list[str]:
+        directory = directory.rstrip("/")
+        return [
+            file.filename
+            for file in self._file.filelist
+            if os.path.dirname(file.filename) == directory
+        ]
+
+    async def list_files(self, directory: str) -> list[str]:
+        return self.sync_list_files(directory)
+
     @staticmethod
-    def _read_meta(source) -> Tuple[ZipFile, PluginMeta]:
+    def _read_meta(source) -> tuple[ZipFile, PluginMeta]:
         try:
             file = ZipFile(source)
             data = file.read("maubot.yaml")
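The new sync_list_files above lists the archive entries that sit directly inside one directory of the plugin zip. A standalone sketch of the same idea using only the standard library (the archive and directory names here are assumptions, not taken from the diff):

# Hypothetical sketch of the directory-listing logic used by sync_list_files.
import os
from zipfile import ZipFile

def list_zip_dir(archive_path: str, directory: str) -> list:
    directory = directory.rstrip("/")
    with ZipFile(archive_path) as zf:
        # keep only entries whose parent directory matches exactly
        return [
            info.filename
            for info in zf.filelist
            if os.path.dirname(info.filename) == directory
        ]

# e.g. list_zip_dir("echobot.mbp", "echobot") might return ["echobot/__init__.py"]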
@@ -127,12 +148,16 @@ class ZippedPluginLoader(PluginLoader):
             meta = PluginMeta.deserialize(meta_dict)
         except SerializerError as e:
             raise MaubotZipMetaError("Maubot plugin definition in file is invalid") from e
+        if meta.maubot > current_version:
+            raise MaubotZipMetaError(
+                f"Plugin requires maubot {meta.maubot}, but this instance is {current_version}"
+            )
         return file, meta

     @classmethod
-    def verify_meta(cls, source) -> Tuple[str, Version]:
+    def verify_meta(cls, source) -> tuple[str, Version, DatabaseType | None]:
         _, meta = cls._read_meta(source)
-        return meta.id, meta.version
+        return meta.id, meta.version, meta.database_type if meta.database else None

     def _load_meta(self) -> None:
         file, meta = self._read_meta(self.path)
@@ -142,7 +167,7 @@ class ZippedPluginLoader(PluginLoader):
         if "/" in meta.main_class:
             self.main_module, self.main_class = meta.main_class.split("/")[:2]
         else:
-            self.main_module = meta.modules[0]
+            self.main_module = meta.modules[-1]
             self.main_class = meta.main_class
         self._file = file

@@ -161,24 +186,24 @@ class ZippedPluginLoader(PluginLoader):
             code = importer.get_code(self.main_module.replace(".", "/"))
             if self.main_class not in code.co_names:
                 raise MaubotZipPreLoadError(
-                    f"Main class {self.main_class} not in {self.main_module}")
+                    f"Main class {self.main_class} not in {self.main_module}"
+                )
         except ZipImportError as e:
-            raise MaubotZipPreLoadError(
-                f"Main module {self.main_module} not found in file") from e
+            raise MaubotZipPreLoadError(f"Main module {self.main_module} not found in file") from e
         for module in self.meta.modules:
             try:
                 importer.find_module(module)
             except ZipImportError as e:
                 raise MaubotZipPreLoadError(f"Module {module} not found in file") from e

-    async def load(self, reset_cache: bool = False) -> Type[PluginClass]:
+    async def load(self, reset_cache: bool = False) -> type[PluginClass]:
         try:
             return self._load(reset_cache)
         except MaubotZipImportError:
             self.log.exception(f"Failed to load {self.meta.id} v{self.meta.version}")
             raise

-    def _load(self, reset_cache: bool = False) -> Type[PluginClass]:
+    def _load(self, reset_cache: bool = False) -> type[PluginClass]:
         if self._loaded is not None and not reset_cache:
             return self._loaded
         self._load_meta()
@@ -207,13 +232,18 @@ class ZippedPluginLoader(PluginLoader):
         self.log.debug(f"Loaded and imported plugin {self.meta.id} from {self.path}")
         return plugin

-    async def reload(self, new_path: Optional[str] = None) -> Type[PluginClass]:
-        await self.unload()
-        if new_path is not None:
+    async def reload(self, new_path: str | None = None) -> type[PluginClass]:
+        self._unload()
+        if new_path is not None and new_path != self.path:
+            try:
+                del self.path_cache[self.path]
+            except KeyError:
+                pass
             self.path = new_path
+            self.path_cache[self.path] = self
         return await self.load(reset_cache=True)

-    async def unload(self) -> None:
+    def _unload(self) -> None:
         for name, mod in list(sys.modules.items()):
             if (getattr(mod, "__file__", "") or "").startswith(self.path):
                 del sys.modules[name]
@@ -221,7 +251,7 @@ class ZippedPluginLoader(PluginLoader):
         self.log.debug(f"Unloaded plugin {self.meta.id} at {self.path}")

     async def delete(self) -> None:
-        await self.unload()
+        self._unload()
         try:
             del self.path_cache[self.path]
         except KeyError:
@@ -239,12 +269,22 @@ class ZippedPluginLoader(PluginLoader):
         self.path = None

     @classmethod
-    def trash(cls, file_path: str, new_name: Optional[str] = None, reason: str = "error") -> None:
+    def trash(cls, file_path: str, new_name: str | None = None, reason: str = "error") -> None:
         if cls.trash_path == "delete":
-            os.remove(file_path)
+            try:
+                os.remove(file_path)
+            except FileNotFoundError:
+                pass
         else:
             new_name = new_name or f"{int(time())}-{reason}-{os.path.basename(file_path)}"
-            os.rename(file_path, os.path.abspath(os.path.join(cls.trash_path, new_name)))
+            try:
+                os.rename(file_path, os.path.abspath(os.path.join(cls.trash_path, new_name)))
+            except OSError as e:
+                cls.log.warning(f"Failed to rename {file_path}: {e} - trying to delete")
+                try:
+                    os.remove(file_path)
+                except FileNotFoundError:
+                    pass

     @classmethod
     def load_all(cls):
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,13 +13,14 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from aiohttp import web
 from asyncio import AbstractEventLoop
 import importlib

+from aiohttp import web
+
 from ...config import Config
-from .base import routes, get_config, set_config, set_loop
 from .auth import check_token
+from .base import get_config, routes, set_config
 from .middleware import auth, error


@@ -30,14 +31,15 @@ def features(request: web.Request) -> web.Response:
     if err is None:
         return web.json_response(data)
     else:
-        return web.json_response({
-            "login": data["login"],
-        })
+        return web.json_response(
+            {
+                "login": data["login"],
+            }
+        )


 def init(cfg: Config, loop: AbstractEventLoop) -> web.Application:
     set_config(cfg)
-    set_loop(loop)
     for pkg, enabled in cfg["api_features"].items():
         if enabled:
             importlib.import_module(f"maubot.management.api.{pkg}")
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,7 +13,8 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from typing import Optional
+from __future__ import annotations

 from time import time

 from aiohttp import web
@@ -21,7 +22,7 @@ from aiohttp import web
 from mautrix.types import UserID
 from mautrix.util.signed_token import sign_token, verify_token

-from .base import routes, get_config
+from .base import get_config, routes
 from .responses import resp


@@ -33,22 +34,25 @@ def is_valid_token(token: str) -> bool:


 def create_token(user: UserID) -> str:
-    return sign_token(get_config()["server.unshared_secret"], {
-        "user_id": user,
-        "created_at": int(time()),
-    })
+    return sign_token(
+        get_config()["server.unshared_secret"],
+        {
+            "user_id": user,
+            "created_at": int(time()),
+        },
+    )


 def get_token(request: web.Request) -> str:
     token = request.headers.get("Authorization", "")
     if not token or not token.startswith("Bearer "):
-        token = request.query.get("access_token", None)
+        token = request.query.get("access_token", "")
     else:
-        token = token[len("Bearer "):]
+        token = token[len("Bearer ") :]
     return token


-def check_token(request: web.Request) -> Optional[web.Response]:
+def check_token(request: web.Request) -> web.Response | None:
     token = get_token(request)
     if not token:
         return resp.no_token
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,15 +13,17 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from aiohttp import web
+from __future__ import annotations

 import asyncio

+from aiohttp import web
+
 from ...__meta__ import __version__
 from ...config import Config

 routes: web.RouteTableDef = web.RouteTableDef()
-_config: Config = None
-_loop: asyncio.AbstractEventLoop = None
+_config: Config | None = None


 def set_config(config: Config) -> None:
@@ -33,17 +35,6 @@ def get_config() -> Config:
     return _config


-def set_loop(loop: asyncio.AbstractEventLoop) -> None:
-    global _loop
-    _loop = loop
-
-
-def get_loop() -> asyncio.AbstractEventLoop:
-    return _loop
-
-
 @routes.get("/version")
 async def version(_: web.Request) -> web.Response:
-    return web.json_response({
-        "version": __version__
-    })
+    return web.json_response({"version": __version__})
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,20 +13,23 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from typing import Optional
+from __future__ import annotations

 from json import JSONDecodeError
+import logging

 from aiohttp import web

-from mautrix.types import UserID, SyncToken, FilterID
-from mautrix.errors import MatrixRequestError, MatrixConnectionError, MatrixInvalidToken
 from mautrix.client import Client as MatrixClient
+from mautrix.errors import MatrixConnectionError, MatrixInvalidToken, MatrixRequestError
+from mautrix.types import FilterID, SyncToken, UserID

-from ...db import DBClient
 from ...client import Client
 from .base import routes
 from .responses import resp

+log = logging.getLogger("maubot.server.client")
+

 @routes.get("/clients")
 async def get_clients(_: web.Request) -> web.Response:
@@ -36,63 +39,94 @@ async def get_clients(_: web.Request) -> web.Response:
 @routes.get("/client/{id}")
 async def get_client(request: web.Request) -> web.Response:
     user_id = request.match_info.get("id", None)
-    client = Client.get(user_id, None)
+    client = await Client.get(user_id)
     if not client:
         return resp.client_not_found
     return resp.found(client.to_dict())


-async def _create_client(user_id: Optional[UserID], data: dict) -> web.Response:
+async def _create_client(user_id: UserID | None, data: dict) -> web.Response:
     homeserver = data.get("homeserver", None)
     access_token = data.get("access_token", None)
-    new_client = MatrixClient(mxid="@not:a.mxid", base_url=homeserver, token=access_token,
-                              loop=Client.loop, client_session=Client.http_client)
+    device_id = data.get("device_id", None)
+    new_client = MatrixClient(
+        mxid="@not:a.mxid",
+        base_url=homeserver,
+        token=access_token,
+        client_session=Client.http_client,
+    )
     try:
-        mxid = await new_client.whoami()
-    except MatrixInvalidToken:
+        whoami = await new_client.whoami()
+    except MatrixInvalidToken as e:
         return resp.bad_client_access_token
     except MatrixRequestError:
+        log.warning(f"Failed to get whoami from {homeserver} for new client", exc_info=True)
         return resp.bad_client_access_details
     except MatrixConnectionError:
+        log.warning(f"Failed to connect to {homeserver} for new client", exc_info=True)
         return resp.bad_client_connection_details
     if user_id is None:
-        existing_client = Client.get(mxid, None)
+        existing_client = await Client.get(whoami.user_id)
         if existing_client is not None:
             return resp.user_exists
-    elif mxid != user_id:
-        return resp.mxid_mismatch(mxid)
-    db_instance = DBClient(id=mxid, homeserver=homeserver, access_token=access_token,
-                           enabled=data.get("enabled", True), next_batch=SyncToken(""),
-                           filter_id=FilterID(""), sync=data.get("sync", True),
-                           autojoin=data.get("autojoin", True),
-                           displayname=data.get("displayname", ""),
-                           avatar_url=data.get("avatar_url", ""))
-    client = Client(db_instance)
-    client.db_instance.insert()
+    elif whoami.user_id != user_id:
+        return resp.mxid_mismatch(whoami.user_id)
+    elif whoami.device_id and device_id and whoami.device_id != device_id:
+        return resp.device_id_mismatch(whoami.device_id)
+    client = await Client.get(
+        whoami.user_id, homeserver=homeserver, access_token=access_token, device_id=device_id
+    )
+    client.enabled = data.get("enabled", True)
+    client.sync = data.get("sync", True)
+    client.autojoin = data.get("autojoin", True)
+    client.online = data.get("online", True)
+    client.displayname = data.get("displayname", "disable")
+    client.avatar_url = data.get("avatar_url", "disable")
+    await client.update()
     await client.start()
     return resp.created(client.to_dict())


-async def _update_client(client: Client, data: dict) -> web.Response:
+async def _update_client(client: Client, data: dict, is_login: bool = False) -> web.Response:
     try:
-        await client.update_access_details(data.get("access_token", None),
-                                           data.get("homeserver", None))
+        await client.update_access_details(
+            data.get("access_token"), data.get("homeserver"), data.get("device_id")
+        )
     except MatrixInvalidToken:
         return resp.bad_client_access_token
     except MatrixRequestError:
+        log.warning(
+            f"Failed to get whoami from homeserver to update client details", exc_info=True
+        )
         return resp.bad_client_access_details
     except MatrixConnectionError:
+        log.warning(f"Failed to connect to homeserver to update client details", exc_info=True)
         return resp.bad_client_connection_details
     except ValueError as e:
-        return resp.mxid_mismatch(str(e)[len("MXID mismatch: "):])
-    with client.db_instance.edit_mode():
-        await client.update_avatar_url(data.get("avatar_url", None))
-        await client.update_displayname(data.get("displayname", None))
-        await client.update_started(data.get("started", None))
-        client.enabled = data.get("enabled", client.enabled)
-        client.autojoin = data.get("autojoin", client.autojoin)
-        client.sync = data.get("sync", client.sync)
-    return resp.updated(client.to_dict())
+        str_err = str(e)
+        if str_err.startswith("MXID mismatch"):
+            return resp.mxid_mismatch(str(e)[len("MXID mismatch: ") :])
+        elif str_err.startswith("Device ID mismatch"):
+            return resp.device_id_mismatch(str(e)[len("Device ID mismatch: ") :])
+    await client.update_avatar_url(data.get("avatar_url"), save=False)
+    await client.update_displayname(data.get("displayname"), save=False)
+    await client.update_started(data.get("started"))
+    await client.update_enabled(data.get("enabled"), save=False)
+    await client.update_autojoin(data.get("autojoin"), save=False)
+    await client.update_online(data.get("online"), save=False)
+    await client.update_sync(data.get("sync"), save=False)
+    await client.update()
+    return resp.updated(client.to_dict(), is_login=is_login)
+
+
+async def _create_or_update_client(
+    user_id: UserID, data: dict, is_login: bool = False
+) -> web.Response:
+    client = await Client.get(user_id)
+    if not client:
+        return await _create_client(user_id, data)
+    else:
+        return await _update_client(client, data, is_login=is_login)


 @routes.post("/client/new")
@@ -106,37 +140,33 @@ async def create_client(request: web.Request) -> web.Response:

 @routes.put("/client/{id}")
 async def update_client(request: web.Request) -> web.Response:
-    user_id = request.match_info.get("id", None)
-    client = Client.get(user_id, None)
+    user_id = request.match_info["id"]
     try:
         data = await request.json()
     except JSONDecodeError:
         return resp.body_not_json
-    if not client:
-        return await _create_client(user_id, data)
-    else:
-        return await _update_client(client, data)
+    return await _create_or_update_client(user_id, data)


 @routes.delete("/client/{id}")
 async def delete_client(request: web.Request) -> web.Response:
-    user_id = request.match_info.get("id", None)
-    client = Client.get(user_id, None)
+    user_id = request.match_info["id"]
+    client = await Client.get(user_id)
     if not client:
         return resp.client_not_found
     if len(client.references) > 0:
         return resp.client_in_use
     if client.started:
         await client.stop()
-    client.delete()
+    await client.delete()
     return resp.deleted


 @routes.post("/client/{id}/clearcache")
 async def clear_client_cache(request: web.Request) -> web.Response:
-    user_id = request.match_info.get("id", None)
-    client = Client.get(user_id, None)
+    user_id = request.match_info["id"]
+    client = await Client.get(user_id)
     if not client:
         return resp.client_not_found
-    client.clear_cache()
+    await client.clear_cache()
     return resp.ok
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,118 +13,261 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from typing import Dict, Tuple, NamedTuple, Optional
-from json import JSONDecodeError
+from __future__ import annotations
+
+from typing import NamedTuple
 from http import HTTPStatus
+from json import JSONDecodeError
+import asyncio
 import hashlib
+import hmac
 import random
 import string
-import hmac

 from aiohttp import web
-from mautrix.api import HTTPAPI, Path, Method
-from mautrix.errors import MatrixRequestError
+from yarl import URL

-from .base import routes, get_config, get_loop
+from mautrix.api import Method, Path, SynapseAdminPath
+from mautrix.client import ClientAPI
+from mautrix.errors import MatrixRequestError
+from mautrix.types import LoginResponse, LoginType
+
+from .base import get_config, routes
+from .client import _create_client, _create_or_update_client
 from .responses import resp


-def registration_secrets() -> Dict[str, Dict[str, str]]:
-    return get_config()["registration_secrets"]
+def known_homeservers() -> dict[str, dict[str, str]]:
+    return get_config()["homeservers"]


-def generate_mac(secret: str, nonce: str, user: str, password: str, admin: bool = False):
-    mac = hmac.new(key=secret.encode("utf-8"), digestmod=hashlib.sha1)
-    mac.update(nonce.encode("utf-8"))
-    mac.update(b"\x00")
-    mac.update(user.encode("utf-8"))
-    mac.update(b"\x00")
-    mac.update(password.encode("utf-8"))
-    mac.update(b"\x00")
-    mac.update(b"admin" if admin else b"notadmin")
-    return mac.hexdigest()
-
-
 @routes.get("/client/auth/servers")
-async def get_registerable_servers(_: web.Request) -> web.Response:
-    return web.json_response({key: value["url"] for key, value in registration_secrets().items()})
+async def get_known_servers(_: web.Request) -> web.Response:
+    return web.json_response({key: value["url"] for key, value in known_homeservers().items()})


-AuthRequestInfo = NamedTuple("AuthRequestInfo", api=HTTPAPI, secret=str, username=str,
-                             password=str, user_type=str)
+class AuthRequestInfo(NamedTuple):
+    server_name: str
+    client: ClientAPI
+    secret: str
+    username: str
+    password: str
+    user_type: str
+    device_name: str
+    update_client: bool
+    sso: bool


-async def read_client_auth_request(request: web.Request) -> Tuple[Optional[AuthRequestInfo],
-                                                                  Optional[web.Response]]:
+truthy_strings = ("1", "true", "yes")
+
+
+async def read_client_auth_request(
+    request: web.Request,
+) -> tuple[AuthRequestInfo | None, web.Response | None]:
     server_name = request.match_info.get("server", None)
-    server = registration_secrets().get(server_name, None)
+    server = known_homeservers().get(server_name, None)
     if not server:
         return None, resp.server_not_found
     try:
         body = await request.json()
     except JSONDecodeError:
         return None, resp.body_not_json
+    sso = request.query.get("sso", "").lower() in truthy_strings
     try:
         username = body["username"]
         password = body["password"]
     except KeyError:
-        return None, resp.username_or_password_missing
+        if not sso:
+            return None, resp.username_or_password_missing
+        username = password = None
     try:
         base_url = server["url"]
-        secret = server["secret"]
     except KeyError:
         return None, resp.invalid_server
-    api = HTTPAPI(base_url, "", loop=get_loop())
-    user_type = body.get("user_type", None)
-    return AuthRequestInfo(api, secret, username, password, user_type), None
+    return (
+        AuthRequestInfo(
+            server_name=server_name,
+            client=ClientAPI(base_url=base_url),
+            secret=server.get("secret"),
+            username=username,
+            password=password,
+            user_type=body.get("user_type", "bot"),
+            device_name=body.get("device_name", "Maubot"),
+            update_client=request.query.get("update_client", "").lower() in truthy_strings,
+            sso=sso,
+        ),
+        None,
+    )
+
+
+def generate_mac(
+    secret: str,
+    nonce: str,
+    username: str,
+    password: str,
+    admin: bool = False,
+    user_type: str = None,
+) -> str:
+    mac = hmac.new(key=secret.encode("utf-8"), digestmod=hashlib.sha1)
+    mac.update(nonce.encode("utf-8"))
+    mac.update(b"\x00")
+    mac.update(username.encode("utf-8"))
+    mac.update(b"\x00")
+    mac.update(password.encode("utf-8"))
+    mac.update(b"\x00")
+    mac.update(b"admin" if admin else b"notadmin")
+    if user_type is not None:
+        mac.update(b"\x00")
+        mac.update(user_type.encode("utf8"))
+    return mac.hexdigest()


 @routes.post("/client/auth/{server}/register")
 async def register(request: web.Request) -> web.Response:
-    info, err = await read_client_auth_request(request)
+    req, err = await read_client_auth_request(request)
     if err is not None:
         return err
-    api, secret, username, password, user_type = info
-    res = await api.request(Method.GET, Path.admin.register)
-    nonce = res["nonce"]
-    mac = generate_mac(secret, nonce, username, password)
-    try:
-        return web.json_response(await api.request(Method.POST, Path.admin.register, content={
-            "nonce": nonce,
-            "username": username,
-            "password": password,
-            "admin": False,
-            "mac": mac,
-            # Older versions of synapse will ignore this field if it is None
-            "user_type": user_type,
-        }))
+    if req.sso:
+        return resp.registration_no_sso
+    elif not req.secret:
+        return resp.registration_secret_not_found
+    path = SynapseAdminPath.v1.register
+    res = await req.client.api.request(Method.GET, path)
+    content = {
+        "nonce": res["nonce"],
+        "username": req.username,
+        "password": req.password,
+        "admin": False,
+        "user_type": req.user_type,
+    }
+    content["mac"] = generate_mac(**content, secret=req.secret)
+    try:
+        raw_res = await req.client.api.request(Method.POST, path, content=content)
     except MatrixRequestError as e:
-        return web.json_response({
-            "errcode": e.errcode,
-            "error": e.message,
-            "http_status": e.http_status,
-        }, status=HTTPStatus.INTERNAL_SERVER_ERROR)
+        return web.json_response(
+            {
+                "errcode": e.errcode,
+                "error": e.message,
+                "http_status": e.http_status,
+            },
+            status=HTTPStatus.INTERNAL_SERVER_ERROR,
+        )
+    login_res = LoginResponse.deserialize(raw_res)
+    if req.update_client:
+        return await _create_client(
+            login_res.user_id,
+            {
+                "homeserver": str(req.client.api.base_url),
+                "access_token": login_res.access_token,
+                "device_id": login_res.device_id,
+            },
+        )
+    return web.json_response(login_res.serialize())


 @routes.post("/client/auth/{server}/login")
 async def login(request: web.Request) -> web.Response:
-    info, err = await read_client_auth_request(request)
+    req, err = await read_client_auth_request(request)
     if err is not None:
         return err
-    api, _, username, password, _ = info
-    device_id = ''.join(random.choices(string.ascii_uppercase + string.digits, k=8))
-    try:
-        return web.json_response(await api.request(Method.POST, Path.login, content={
-            "type": "m.login.password",
-            "identifier": {
-                "type": "m.id.user",
-                "user": username,
-            },
-            "password": password,
-            "device_id": f"maubot_{device_id}",
-        }))
+    if req.sso:
+        return await _do_sso(req)
+    else:
+        return await _do_login(req)
+
+
+async def _do_sso(req: AuthRequestInfo) -> web.Response:
+    flows = await req.client.get_login_flows()
+    if not flows.supports_type(LoginType.SSO):
+        return resp.sso_not_supported
+    waiter_id = "".join(random.choices(string.ascii_lowercase + string.digits, k=16))
+    cfg = get_config()
+    public_url = (
+        URL(cfg["server.public_url"])
+        / "_matrix/maubot/v1/client/auth_external_sso/complete"
+        / waiter_id
+    )
+    sso_url = req.client.api.base_url.with_path(str(Path.v3.login.sso.redirect)).with_query(
+        {"redirectUrl": str(public_url)}
+    )
+    sso_waiters[waiter_id] = req, asyncio.get_running_loop().create_future()
+    return web.json_response({"sso_url": str(sso_url), "id": waiter_id})
+
+
+async def _do_login(req: AuthRequestInfo, login_token: str | None = None) -> web.Response:
+    device_id = "".join(random.choices(string.ascii_uppercase + string.digits, k=8))
+    device_id = f"maubot_{device_id}"
+    try:
+        if req.sso:
+            res = await req.client.login(
+                token=login_token,
+                login_type=LoginType.TOKEN,
+                device_id=device_id,
+                store_access_token=False,
+                initial_device_display_name=req.device_name,
+            )
+        else:
+            res = await req.client.login(
+                identifier=req.username,
+                login_type=LoginType.PASSWORD,
+                password=req.password,
+                device_id=device_id,
+                initial_device_display_name=req.device_name,
+                store_access_token=False,
+            )
     except MatrixRequestError as e:
-        return web.json_response({
-            "errcode": e.errcode,
-            "error": e.message,
-        }, status=e.http_status)
+        return web.json_response(
+            {
+                "errcode": e.errcode,
+                "error": e.message,
+            },
+            status=e.http_status,
+        )
+    if req.update_client:
+        return await _create_or_update_client(
+            res.user_id,
+            {
+                "homeserver": str(req.client.api.base_url),
+                "access_token": res.access_token,
+                "device_id": res.device_id,
+            },
+            is_login=True,
+        )
+    return web.json_response(res.serialize())
+
+
+sso_waiters: dict[str, tuple[AuthRequestInfo, asyncio.Future]] = {}
+
+
+@routes.post("/client/auth/{server}/sso/{id}/wait")
+async def wait_sso(request: web.Request) -> web.Response:
+    waiter_id = request.match_info["id"]
+    req, fut = sso_waiters[waiter_id]
+    try:
+        login_token = await fut
+    finally:
+        sso_waiters.pop(waiter_id, None)
+    return await _do_login(req, login_token)
+
+
+@routes.get("/client/auth_external_sso/complete/{id}")
+async def complete_sso(request: web.Request) -> web.Response:
+    try:
+        _, fut = sso_waiters[request.match_info["id"]]
+    except KeyError:
+        return web.Response(status=404, text="Invalid session ID\n")
+    if fut.cancelled():
+        return web.Response(status=200, text="The login was cancelled from the Maubot client\n")
+    elif fut.done():
+        return web.Response(status=200, text="The login token was already received\n")
+    try:
+        fut.set_result(request.query["loginToken"])
+    except KeyError:
+        return web.Response(status=400, text="Missing loginToken query parameter\n")
+    except asyncio.InvalidStateError:
+        return web.Response(status=500, text="Invalid state\n")
+    return web.Response(
+        status=200,
+        text="Login token received, please return to your Maubot client. "
+        "This tab can be closed.\n",
+    )
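The rewritten register endpoint above builds the request body for Synapse's shared-secret registration API and signs it with generate_mac. A standalone, hedged sketch of that HMAC follows; it assumes a plain non-admin registration with no user_type, which is only one of the cases the real function handles.

# Hypothetical sketch of the shared-secret registration MAC computed by generate_mac.
import hashlib
import hmac

def registration_mac(secret: str, nonce: str, username: str, password: str) -> str:
    mac = hmac.new(key=secret.encode("utf-8"), digestmod=hashlib.sha1)
    mac.update(nonce.encode("utf-8"))
    mac.update(b"\x00")
    mac.update(username.encode("utf-8"))
    mac.update(b"\x00")
    mac.update(password.encode("utf-8"))
    mac.update(b"\x00")
    mac.update(b"notadmin")  # this sketch always registers a regular (non-admin) user
    return mac.hexdigest()

# The digest is sent as the "mac" field alongside nonce, username and password
# in the POST to the Synapse admin register endpoint.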
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -13,7 +13,7 @@
 #
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
-from aiohttp import web, client as http
+from aiohttp import client as http, web

 from ...client import Client
 from .base import routes
@@ -25,7 +25,7 @@ PROXY_CHUNK_SIZE = 32 * 1024
 @routes.view("/proxy/{id}/{path:_matrix/.+}")
 async def proxy(request: web.Request) -> web.StreamResponse:
     user_id = request.match_info.get("id", None)
-    client = Client.get(user_id, None)
+    client = await Client.get(user_id)
     if not client:
         return resp.client_not_found

@@ -45,8 +45,9 @@ async def proxy(request: web.Request) -> web.StreamResponse:
     headers["X-Forwarded-For"] = f"{host}:{port}"

     data = await request.read()
-    async with http.request(request.method, f"{client.homeserver}/{path}", headers=headers,
-                            params=query, data=data) as proxy_resp:
+    async with http.request(
+        request.method, f"{client.homeserver}/{path}", headers=headers, params=query, data=data
+    ) as proxy_resp:
         response = web.StreamResponse(status=proxy_resp.status, headers=proxy_resp.headers)
         await response.prepare(request)
         async for chunk in proxy_resp.content.iter_chunked(PROXY_CHUNK_SIZE):
@@ -1,5 +1,5 @@
 # maubot - A plugin-based Matrix bot system.
-# Copyright (C) 2019 Tulir Asokan
+# Copyright (C) 2022 Tulir Asokan
 #
 # This program is free software: you can redistribute it and/or modify
 # it under the terms of the GNU Affero General Public License as published by
@@ -14,11 +14,11 @@
 # You should have received a copy of the GNU Affero General Public License
 # along with this program. If not, see <https://www.gnu.org/licenses/>.
 from string import Template
-from subprocess import run
+import asyncio
 import re

-from ruamel.yaml import YAML
 from aiohttp import web
+from ruamel.yaml import YAML

 from .base import routes

@@ -27,9 +27,7 @@ enabled = False

 @routes.get("/debug/open")
 async def check_enabled(_: web.Request) -> web.Response:
-    return web.json_response({
-        "enabled": enabled,
-    })
+    return web.json_response({"enabled": enabled})


 try:
@@ -40,7 +38,6 @@ try:
     editor_command = Template(cfg["editor"])
     pathmap = [(re.compile(item["find"]), item["replace"]) for item in cfg["pathmap"]]

-
     @routes.post("/debug/open")
     async def open_file(request: web.Request) -> web.Response:
         data = await request.json()
@@ -51,13 +48,9 @@ try:
             cmd = editor_command.substitute(path=path, line=data["line"])
         except (KeyError, ValueError):
             return web.Response(status=400)
-        res = run(cmd, shell=True)
-        return web.json_response({
-            "return": res.returncode,
-            "stdout": res.stdout,
-            "stderr": res.stderr
-        })
+        res = await asyncio.create_subprocess_shell(cmd)
+        stdout, stderr = await res.communicate()
+        return web.json_response({"return": res.returncode, "stdout": stdout, "stderr": stderr})

     enabled = True
 except Exception:
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -17,10 +17,9 @@ from json import JSONDecodeError
|
|||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
|
|
||||||
from ...db import DBPlugin
|
from ...client import Client
|
||||||
from ...instance import PluginInstance
|
from ...instance import PluginInstance
|
||||||
from ...loader import PluginLoader
|
from ...loader import PluginLoader
|
||||||
from ...client import Client
|
|
||||||
from .base import routes
|
from .base import routes
|
||||||
from .responses import resp
|
from .responses import resp
|
||||||
|
|
||||||
@ -32,51 +31,50 @@ async def get_instances(_: web.Request) -> web.Response:
|
|||||||
|
|
||||||
@routes.get("/instance/{id}")
|
@routes.get("/instance/{id}")
|
||||||
async def get_instance(request: web.Request) -> web.Response:
|
async def get_instance(request: web.Request) -> web.Response:
|
||||||
instance_id = request.match_info.get("id", "").lower()
|
instance_id = request.match_info["id"].lower()
|
||||||
instance = PluginInstance.get(instance_id, None)
|
instance = await PluginInstance.get(instance_id)
|
||||||
if not instance:
|
if not instance:
|
||||||
return resp.instance_not_found
|
return resp.instance_not_found
|
||||||
return resp.found(instance.to_dict())
|
return resp.found(instance.to_dict())
|
||||||
|
|
||||||
|
|
||||||
async def _create_instance(instance_id: str, data: dict) -> web.Response:
|
async def _create_instance(instance_id: str, data: dict) -> web.Response:
|
||||||
plugin_type = data.get("type", None)
|
plugin_type = data.get("type")
|
||||||
primary_user = data.get("primary_user", None)
|
primary_user = data.get("primary_user")
|
||||||
if not plugin_type:
|
if not plugin_type:
|
||||||
return resp.plugin_type_required
|
return resp.plugin_type_required
|
||||||
elif not primary_user:
|
elif not primary_user:
|
||||||
return resp.primary_user_required
|
return resp.primary_user_required
|
||||||
elif not Client.get(primary_user):
|
elif not await Client.get(primary_user):
|
||||||
return resp.primary_user_not_found
|
return resp.primary_user_not_found
|
||||||
try:
|
try:
|
||||||
PluginLoader.find(plugin_type)
|
PluginLoader.find(plugin_type)
|
||||||
except KeyError:
|
except KeyError:
|
||||||
return resp.plugin_type_not_found
|
return resp.plugin_type_not_found
|
||||||
db_instance = DBPlugin(id=instance_id, type=plugin_type, enabled=data.get("enabled", True),
|
instance = await PluginInstance.get(instance_id, type=plugin_type, primary_user=primary_user)
|
||||||
primary_user=primary_user, config=data.get("config", ""))
|
instance.enabled = data.get("enabled", True)
|
||||||
instance = PluginInstance(db_instance)
|
instance.config_str = data.get("config") or ""
|
||||||
instance.load()
|
await instance.update()
|
||||||
instance.db_instance.insert()
|
await instance.load()
|
||||||
await instance.start()
|
await instance.start()
|
||||||
return resp.created(instance.to_dict())
|
return resp.created(instance.to_dict())
|
||||||
|
|
||||||
|
|
||||||
async def _update_instance(instance: PluginInstance, data: dict) -> web.Response:
|
async def _update_instance(instance: PluginInstance, data: dict) -> web.Response:
|
||||||
if not await instance.update_primary_user(data.get("primary_user", None)):
|
if not await instance.update_primary_user(data.get("primary_user")):
|
||||||
return resp.primary_user_not_found
|
return resp.primary_user_not_found
|
||||||
with instance.db_instance.edit_mode():
|
await instance.update_id(data.get("id"))
|
||||||
instance.update_id(data.get("id", None))
|
await instance.update_enabled(data.get("enabled"))
|
||||||
instance.update_enabled(data.get("enabled", None))
|
await instance.update_config(data.get("config"))
|
||||||
instance.update_config(data.get("config", None))
|
await instance.update_started(data.get("started"))
|
||||||
await instance.update_started(data.get("started", None))
|
await instance.update_type(data.get("type"))
|
||||||
await instance.update_type(data.get("type", None))
|
|
||||||
return resp.updated(instance.to_dict())
|
return resp.updated(instance.to_dict())
|
||||||
|
|
||||||
|
|
||||||
@routes.put("/instance/{id}")
|
@routes.put("/instance/{id}")
|
||||||
async def update_instance(request: web.Request) -> web.Response:
|
async def update_instance(request: web.Request) -> web.Response:
|
||||||
instance_id = request.match_info.get("id", "").lower()
|
instance_id = request.match_info["id"].lower()
|
||||||
instance = PluginInstance.get(instance_id, None)
|
instance = await PluginInstance.get(instance_id)
|
||||||
try:
|
try:
|
||||||
data = await request.json()
|
data = await request.json()
|
||||||
except JSONDecodeError:
|
except JSONDecodeError:
|
||||||
@ -89,11 +87,11 @@ async def update_instance(request: web.Request) -> web.Response:
|
|||||||
|
|
||||||
@routes.delete("/instance/{id}")
|
@routes.delete("/instance/{id}")
|
||||||
async def delete_instance(request: web.Request) -> web.Response:
|
async def delete_instance(request: web.Request) -> web.Response:
|
||||||
instance_id = request.match_info.get("id", "").lower()
|
instance_id = request.match_info["id"].lower()
|
||||||
instance = PluginInstance.get(instance_id)
|
instance = await PluginInstance.get(instance_id)
|
||||||
if not instance:
|
if not instance:
|
||||||
return resp.instance_not_found
|
return resp.instance_not_found
|
||||||
if instance.started:
|
if instance.started:
|
||||||
await instance.stop()
|
await instance.stop()
|
||||||
instance.delete()
|
await instance.delete()
|
||||||
return resp.deleted
|
return resp.deleted
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -13,80 +13,67 @@
|
|||||||
#
|
#
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
from typing import Union, TYPE_CHECKING
|
from __future__ import annotations
|
||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
from sqlalchemy import Table, Column, asc, desc, exc
|
from asyncpg import PostgresError
|
||||||
from sqlalchemy.orm import Query
|
import aiosqlite
|
||||||
from sqlalchemy.engine.result import ResultProxy, RowProxy
|
|
||||||
|
from mautrix.util.async_db import Database
|
||||||
|
|
||||||
from ...instance import PluginInstance
|
from ...instance import PluginInstance
|
||||||
|
from ...lib.optionalalchemy import Engine, IntegrityError, OperationalError, asc, desc
|
||||||
from .base import routes
|
from .base import routes
|
||||||
from .responses import resp
|
from .responses import resp
|
||||||
|
|
||||||
|
|
||||||
@routes.get("/instance/{id}/database")
|
@routes.get("/instance/{id}/database")
|
||||||
async def get_database(request: web.Request) -> web.Response:
|
async def get_database(request: web.Request) -> web.Response:
|
||||||
instance_id = request.match_info.get("id", "")
|
instance_id = request.match_info["id"].lower()
|
||||||
instance = PluginInstance.get(instance_id, None)
|
instance = await PluginInstance.get(instance_id)
|
||||||
if not instance:
|
if not instance:
|
||||||
return resp.instance_not_found
|
return resp.instance_not_found
|
||||||
elif not instance.inst_db:
|
elif not instance.inst_db:
|
||||||
return resp.plugin_has_no_database
|
return resp.plugin_has_no_database
|
||||||
if TYPE_CHECKING:
|
return web.json_response(await instance.get_db_tables())
|
||||||
table: Table
|
|
||||||
column: Column
|
|
||||||
return web.json_response({
|
|
||||||
table.name: {
|
|
||||||
"columns": {
|
|
||||||
column.name: {
|
|
||||||
"type": str(column.type),
|
|
||||||
"unique": column.unique or False,
|
|
||||||
"default": column.default,
|
|
||||||
"nullable": column.nullable,
|
|
||||||
"primary": column.primary_key,
|
|
||||||
"autoincrement": column.autoincrement,
|
|
||||||
} for column in table.columns
|
|
||||||
},
|
|
||||||
} for table in instance.get_db_tables().values()
|
|
||||||
})
|
|
||||||
|
|
||||||
|
|
||||||
def check_type(val):
|
|
||||||
if isinstance(val, datetime):
|
|
||||||
return val.isoformat()
|
|
||||||
return val
|
|
||||||
|
|
||||||
|
|
||||||
@routes.get("/instance/{id}/database/{table}")
|
@routes.get("/instance/{id}/database/{table}")
|
||||||
async def get_table(request: web.Request) -> web.Response:
|
async def get_table(request: web.Request) -> web.Response:
|
||||||
instance_id = request.match_info.get("id", "")
|
instance_id = request.match_info["id"].lower()
|
||||||
instance = PluginInstance.get(instance_id, None)
|
instance = await PluginInstance.get(instance_id)
|
||||||
if not instance:
|
if not instance:
|
||||||
return resp.instance_not_found
|
return resp.instance_not_found
|
||||||
elif not instance.inst_db:
|
elif not instance.inst_db:
|
||||||
return resp.plugin_has_no_database
|
return resp.plugin_has_no_database
|
||||||
tables = instance.get_db_tables()
|
tables = await instance.get_db_tables()
|
||||||
try:
|
try:
|
||||||
table = tables[request.match_info.get("table", "")]
|
table = tables[request.match_info.get("table", "")]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
return resp.table_not_found
|
return resp.table_not_found
|
||||||
try:
|
try:
|
||||||
order = [tuple(order.split(":")) for order in request.query.getall("order")]
|
order = [tuple(order.split(":")) for order in request.query.getall("order")]
|
||||||
order = [(asc if sort.lower() == "asc" else desc)(table.columns[column])
|
order = [
|
||||||
if sort else table.columns[column]
|
(
|
||||||
for column, sort in order]
|
(asc if sort.lower() == "asc" else desc)(table.columns[column])
|
||||||
|
if sort
|
||||||
|
else table.columns[column]
|
||||||
|
)
|
||||||
|
for column, sort in order
|
||||||
|
]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
order = []
|
order = []
|
||||||
limit = int(request.query.get("limit", 100))
|
limit = int(request.query.get("limit", "100"))
|
||||||
return execute_query(instance, table.select().order_by(*order).limit(limit))
|
if isinstance(instance.inst_db, Engine):
|
||||||
|
return _execute_query_sqlalchemy(instance, table.select().order_by(*order).limit(limit))
|
||||||
|
|
||||||
|
|
||||||
@routes.post("/instance/{id}/database/query")
|
@routes.post("/instance/{id}/database/query")
|
||||||
async def query(request: web.Request) -> web.Response:
|
async def query(request: web.Request) -> web.Response:
|
||||||
instance_id = request.match_info.get("id", "")
|
instance_id = request.match_info["id"].lower()
|
||||||
instance = PluginInstance.get(instance_id, None)
|
instance = await PluginInstance.get(instance_id)
|
||||||
if not instance:
|
if not instance:
|
||||||
return resp.instance_not_found
|
return resp.instance_not_found
|
||||||
elif not instance.inst_db:
|
elif not instance.inst_db:
|
||||||
@ -96,28 +83,76 @@ async def query(request: web.Request) -> web.Response:
|
|||||||
sql_query = data["query"]
|
sql_query = data["query"]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
return resp.query_missing
|
return resp.query_missing
|
||||||
return execute_query(instance, sql_query,
|
rows_as_dict = data.get("rows_as_dict", False)
|
||||||
rows_as_dict=data.get("rows_as_dict", False))
|
if isinstance(instance.inst_db, Engine):
|
||||||
|
return _execute_query_sqlalchemy(instance, sql_query, rows_as_dict)
|
||||||
|
elif isinstance(instance.inst_db, Database):
|
||||||
def execute_query(instance: PluginInstance, sql_query: Union[str, Query],
|
|
||||||
rows_as_dict: bool = False) -> web.Response:
|
|
||||||
try:
|
try:
|
||||||
res: ResultProxy = instance.inst_db.execute(sql_query)
|
return await _execute_query_asyncpg(instance, sql_query, rows_as_dict)
|
||||||
except exc.IntegrityError as e:
|
except (PostgresError, aiosqlite.Error) as e:
|
||||||
|
return resp.sql_error(e, sql_query)
|
||||||
|
else:
|
||||||
|
return resp.unsupported_plugin_database
|
||||||
|
|
||||||
|
|
||||||
|
def check_type(val):
|
||||||
|
if isinstance(val, datetime):
|
||||||
|
return val.isoformat()
|
||||||
|
return val
|
||||||
|
|
||||||
|
|
||||||
|
async def _execute_query_asyncpg(
|
||||||
|
instance: PluginInstance, sql_query: str, rows_as_dict: bool = False
|
||||||
|
) -> web.Response:
|
||||||
|
data = {"ok": True, "query": sql_query}
|
||||||
|
if sql_query.upper().startswith("SELECT"):
|
||||||
|
res = await instance.inst_db.fetch(sql_query)
|
||||||
|
data["rows"] = [
|
||||||
|
(
|
||||||
|
{key: check_type(value) for key, value in row.items()}
|
||||||
|
if rows_as_dict
|
||||||
|
else [check_type(value) for value in row]
|
||||||
|
)
|
||||||
|
for row in res
|
||||||
|
]
|
||||||
|
if len(res) > 0:
|
||||||
|
# TODO can we find column names when there are no rows?
|
||||||
|
data["columns"] = list(res[0].keys())
|
||||||
|
else:
|
||||||
|
res = await instance.inst_db.execute(sql_query)
|
||||||
|
if isinstance(res, str):
|
||||||
|
data["status_msg"] = res
|
||||||
|
elif isinstance(res, aiosqlite.Cursor):
|
||||||
|
data["rowcount"] = res.rowcount
|
||||||
|
# data["inserted_primary_key"] = res.lastrowid
|
||||||
|
else:
|
||||||
|
data["status_msg"] = "unknown status"
|
||||||
|
return web.json_response(data)
|
||||||
|
|
||||||
|
|
||||||
|
def _execute_query_sqlalchemy(
|
||||||
|
instance: PluginInstance, sql_query: str, rows_as_dict: bool = False
|
||||||
|
) -> web.Response:
|
||||||
|
assert isinstance(instance.inst_db, Engine)
|
||||||
|
try:
|
||||||
|
res = instance.inst_db.execute(sql_query)
|
||||||
|
except IntegrityError as e:
|
||||||
return resp.sql_integrity_error(e, sql_query)
|
return resp.sql_integrity_error(e, sql_query)
|
||||||
except exc.OperationalError as e:
|
except OperationalError as e:
|
||||||
return resp.sql_operational_error(e, sql_query)
|
return resp.sql_operational_error(e, sql_query)
|
||||||
data = {
|
data = {
|
||||||
"ok": True,
|
"ok": True,
|
||||||
"query": str(sql_query),
|
"query": str(sql_query),
|
||||||
}
|
}
|
||||||
if res.returns_rows:
|
if res.returns_rows:
|
||||||
row: RowProxy
|
data["rows"] = [
|
||||||
data["rows"] = [({key: check_type(value) for key, value in row.items()}
|
(
|
||||||
|
{key: check_type(value) for key, value in row.items()}
|
||||||
if rows_as_dict
|
if rows_as_dict
|
||||||
else [check_type(value) for value in row])
|
else [check_type(value) for value in row]
|
||||||
for row in res]
|
)
|
||||||
|
for row in res
|
||||||
|
]
|
||||||
data["columns"] = res.keys()
|
data["columns"] = res.keys()
|
||||||
else:
|
else:
|
||||||
data["rowcount"] = res.rowcount
|
data["rowcount"] = res.rowcount
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -13,31 +13,63 @@
|
|||||||
#
|
#
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
from typing import Deque, List
|
from __future__ import annotations
|
||||||
from datetime import datetime
|
|
||||||
from collections import deque
|
from collections import deque
|
||||||
import logging
|
from datetime import datetime
|
||||||
import asyncio
|
import asyncio
|
||||||
|
import logging
|
||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web, web_ws
|
||||||
|
|
||||||
|
from mautrix.util import background_task
|
||||||
|
|
||||||
from .base import routes, get_loop
|
|
||||||
from .auth import is_valid_token
|
from .auth import is_valid_token
|
||||||
|
from .base import routes
|
||||||
|
|
||||||
BUILTIN_ATTRS = {"args", "asctime", "created", "exc_info", "exc_text", "filename", "funcName",
|
BUILTIN_ATTRS = {
|
||||||
"levelname", "levelno", "lineno", "module", "msecs", "message", "msg", "name",
|
"args",
|
||||||
"pathname", "process", "processName", "relativeCreated", "stack_info", "thread",
|
"asctime",
|
||||||
"threadName"}
|
"created",
|
||||||
INCLUDE_ATTRS = {"filename", "funcName", "levelname", "levelno", "lineno", "module", "name",
|
"exc_info",
|
||||||
"pathname"}
|
"exc_text",
|
||||||
|
"filename",
|
||||||
|
"funcName",
|
||||||
|
"levelname",
|
||||||
|
"levelno",
|
||||||
|
"lineno",
|
||||||
|
"module",
|
||||||
|
"msecs",
|
||||||
|
"message",
|
||||||
|
"msg",
|
||||||
|
"name",
|
||||||
|
"pathname",
|
||||||
|
"process",
|
||||||
|
"processName",
|
||||||
|
"relativeCreated",
|
||||||
|
"stack_info",
|
||||||
|
"thread",
|
||||||
|
"threadName",
|
||||||
|
}
|
||||||
|
INCLUDE_ATTRS = {
|
||||||
|
"filename",
|
||||||
|
"funcName",
|
||||||
|
"levelname",
|
||||||
|
"levelno",
|
||||||
|
"lineno",
|
||||||
|
"module",
|
||||||
|
"name",
|
||||||
|
"pathname",
|
||||||
|
}
|
||||||
EXCLUDE_ATTRS = BUILTIN_ATTRS - INCLUDE_ATTRS
|
EXCLUDE_ATTRS = BUILTIN_ATTRS - INCLUDE_ATTRS
|
||||||
MAX_LINES = 2048
|
MAX_LINES = 2048
|
||||||
|
|
||||||
|
|
||||||
class LogCollector(logging.Handler):
|
class LogCollector(logging.Handler):
|
||||||
lines: Deque[dict]
|
lines: deque[dict]
|
||||||
formatter: logging.Formatter
|
formatter: logging.Formatter
|
||||||
listeners: List[web.WebSocketResponse]
|
listeners: list[web.WebSocketResponse]
|
||||||
|
loop: asyncio.AbstractEventLoop
|
||||||
|
|
||||||
def __init__(self, level=logging.NOTSET) -> None:
|
def __init__(self, level=logging.NOTSET) -> None:
|
||||||
super().__init__(level)
|
super().__init__(level)
|
||||||
@ -55,9 +87,7 @@ class LogCollector(logging.Handler):
|
|||||||
# JSON conversion based on Marsel Mavletkulov's json-log-formatter (MIT license)
|
# JSON conversion based on Marsel Mavletkulov's json-log-formatter (MIT license)
|
||||||
# https://github.com/marselester/json-log-formatter
|
# https://github.com/marselester/json-log-formatter
|
||||||
content = {
|
content = {
|
||||||
name: value
|
name: value for name, value in record.__dict__.items() if name not in EXCLUDE_ATTRS
|
||||||
for name, value in record.__dict__.items()
|
|
||||||
if name not in EXCLUDE_ATTRS
|
|
||||||
}
|
}
|
||||||
content["id"] = str(record.relativeCreated)
|
content["id"] = str(record.relativeCreated)
|
||||||
content["msg"] = record.getMessage()
|
content["msg"] = record.getMessage()
|
||||||
@ -69,7 +99,7 @@ class LogCollector(logging.Handler):
|
|||||||
for name, value in content.items():
|
for name, value in content.items():
|
||||||
if isinstance(value, datetime):
|
if isinstance(value, datetime):
|
||||||
content[name] = value.astimezone().isoformat()
|
content[name] = value.astimezone().isoformat()
|
||||||
asyncio.ensure_future(self.send(content))
|
asyncio.run_coroutine_threadsafe(self.send(content), loop=self.loop)
|
||||||
self.lines.append(content)
|
self.lines.append(content)
|
||||||
|
|
||||||
async def send(self, record: dict) -> None:
|
async def send(self, record: dict) -> None:
|
||||||
@ -81,17 +111,18 @@ class LogCollector(logging.Handler):
|
|||||||
|
|
||||||
|
|
||||||
handler = LogCollector()
|
handler = LogCollector()
|
||||||
log_root = logging.getLogger("maubot")
|
|
||||||
log = logging.getLogger("maubot.server.websocket")
|
log = logging.getLogger("maubot.server.websocket")
|
||||||
sockets = []
|
sockets = []
|
||||||
|
|
||||||
|
|
||||||
def init() -> None:
|
def init(loop: asyncio.AbstractEventLoop) -> None:
|
||||||
log_root.addHandler(handler)
|
logging.root.addHandler(handler)
|
||||||
|
handler.loop = loop
|
||||||
|
|
||||||
|
|
||||||
async def stop_all() -> None:
|
async def stop_all() -> None:
|
||||||
log_root.removeHandler(handler)
|
log.debug("Closing log listener websockets")
|
||||||
|
logging.root.removeHandler(handler)
|
||||||
for socket in sockets:
|
for socket in sockets:
|
||||||
try:
|
try:
|
||||||
await socket.close(code=1012)
|
await socket.close(code=1012)
|
||||||
@ -108,14 +139,15 @@ async def log_websocket(request: web.Request) -> web.WebSocketResponse:
|
|||||||
authenticated = False
|
authenticated = False
|
||||||
|
|
||||||
async def close_if_not_authenticated():
|
async def close_if_not_authenticated():
|
||||||
await asyncio.sleep(5, loop=get_loop())
|
await asyncio.sleep(5)
|
||||||
if not authenticated:
|
if not authenticated:
|
||||||
await ws.close(code=4000)
|
await ws.close(code=4000)
|
||||||
log.debug(f"Connection from {request.remote} terminated due to no authentication")
|
log.debug(f"Connection from {request.remote} terminated due to no authentication")
|
||||||
|
|
||||||
asyncio.ensure_future(close_if_not_authenticated())
|
background_task.create(close_if_not_authenticated())
|
||||||
|
|
||||||
try:
|
try:
|
||||||
|
msg: web_ws.WSMessage
|
||||||
async for msg in ws:
|
async for msg in ws:
|
||||||
if msg.type != web.WSMsgType.TEXT:
|
if msg.type != web.WSMsgType.TEXT:
|
||||||
continue
|
continue
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -17,9 +17,10 @@ import json
|
|||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
|
|
||||||
from .base import routes, get_config
|
|
||||||
from .responses import resp
|
|
||||||
from .auth import create_token
|
from .auth import create_token
|
||||||
|
from .base import get_config, routes
|
||||||
|
from .responses import resp
|
||||||
|
|
||||||
|
|
||||||
@routes.post("/auth/login")
|
@routes.post("/auth/login")
|
||||||
async def login(request: web.Request) -> web.Response:
|
async def login(request: web.Request) -> web.Response:
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -13,14 +13,15 @@
|
|||||||
#
|
#
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
from typing import Callable, Awaitable
|
from typing import Awaitable, Callable
|
||||||
|
import base64
|
||||||
import logging
|
import logging
|
||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
|
|
||||||
from .responses import resp
|
|
||||||
from .auth import check_token
|
from .auth import check_token
|
||||||
from .base import get_config
|
from .base import get_config
|
||||||
|
from .responses import resp
|
||||||
|
|
||||||
Handler = Callable[[web.Request], Awaitable[web.Response]]
|
Handler = Callable[[web.Request], Awaitable[web.Response]]
|
||||||
log = logging.getLogger("maubot.server")
|
log = logging.getLogger("maubot.server")
|
||||||
@ -28,8 +29,13 @@ log = logging.getLogger("maubot.server")
|
|||||||
|
|
||||||
@web.middleware
|
@web.middleware
|
||||||
async def auth(request: web.Request, handler: Handler) -> web.Response:
|
async def auth(request: web.Request, handler: Handler) -> web.Response:
|
||||||
subpath = request.path[len(get_config()["server.base_path"]):]
|
subpath = request.path[len("/_matrix/maubot/v1") :]
|
||||||
if subpath.startswith("/auth/") or subpath == "/features" or subpath == "/logs":
|
if (
|
||||||
|
subpath.startswith("/auth/")
|
||||||
|
or subpath.startswith("/client/auth_external_sso/complete/")
|
||||||
|
or subpath == "/features"
|
||||||
|
or subpath == "/logs"
|
||||||
|
):
|
||||||
return await handler(request)
|
return await handler(request)
|
||||||
err = check_token(request)
|
err = check_token(request)
|
||||||
if err is not None:
|
if err is not None:
|
||||||
@ -46,10 +52,18 @@ async def error(request: web.Request, handler: Handler) -> web.Response:
|
|||||||
return resp.path_not_found
|
return resp.path_not_found
|
||||||
elif ex.status_code == 405:
|
elif ex.status_code == 405:
|
||||||
return resp.method_not_allowed
|
return resp.method_not_allowed
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
"error": f"Unhandled HTTP {ex.status}",
|
{
|
||||||
|
"httpexception": {
|
||||||
|
"headers": {key: value for key, value in ex.headers.items()},
|
||||||
|
"class": type(ex).__name__,
|
||||||
|
"body": ex.text or base64.b64encode(ex.body),
|
||||||
|
},
|
||||||
|
"error": f"Unhandled HTTP {ex.status}: {ex.text[:128] or 'non-text response'}",
|
||||||
"errcode": f"unhandled_http_{ex.status}",
|
"errcode": f"unhandled_http_{ex.status}",
|
||||||
}, status=ex.status)
|
},
|
||||||
|
status=ex.status,
|
||||||
|
)
|
||||||
except Exception:
|
except Exception:
|
||||||
log.exception("Error in handler")
|
log.exception("Error in handler")
|
||||||
return resp.internal_server_error
|
return resp.internal_server_error
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -17,9 +17,9 @@ import traceback
|
|||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
|
|
||||||
from ...loader import PluginLoader, MaubotZipImportError
|
from ...loader import MaubotZipImportError, PluginLoader
|
||||||
from .responses import resp
|
|
||||||
from .base import routes
|
from .base import routes
|
||||||
|
from .responses import resp
|
||||||
|
|
||||||
|
|
||||||
@routes.get("/plugins")
|
@routes.get("/plugins")
|
||||||
@ -29,8 +29,8 @@ async def get_plugins(_) -> web.Response:
|
|||||||
|
|
||||||
@routes.get("/plugin/{id}")
|
@routes.get("/plugin/{id}")
|
||||||
async def get_plugin(request: web.Request) -> web.Response:
|
async def get_plugin(request: web.Request) -> web.Response:
|
||||||
plugin_id = request.match_info.get("id", None)
|
plugin_id = request.match_info["id"]
|
||||||
plugin = PluginLoader.id_cache.get(plugin_id, None)
|
plugin = PluginLoader.id_cache.get(plugin_id)
|
||||||
if not plugin:
|
if not plugin:
|
||||||
return resp.plugin_not_found
|
return resp.plugin_not_found
|
||||||
return resp.found(plugin.to_dict())
|
return resp.found(plugin.to_dict())
|
||||||
@ -38,8 +38,8 @@ async def get_plugin(request: web.Request) -> web.Response:
|
|||||||
|
|
||||||
@routes.delete("/plugin/{id}")
|
@routes.delete("/plugin/{id}")
|
||||||
async def delete_plugin(request: web.Request) -> web.Response:
|
async def delete_plugin(request: web.Request) -> web.Response:
|
||||||
plugin_id = request.match_info.get("id", None)
|
plugin_id = request.match_info["id"]
|
||||||
plugin = PluginLoader.id_cache.get(plugin_id, None)
|
plugin = PluginLoader.id_cache.get(plugin_id)
|
||||||
if not plugin:
|
if not plugin:
|
||||||
return resp.plugin_not_found
|
return resp.plugin_not_found
|
||||||
elif len(plugin.references) > 0:
|
elif len(plugin.references) > 0:
|
||||||
@ -50,8 +50,8 @@ async def delete_plugin(request: web.Request) -> web.Response:
|
|||||||
|
|
||||||
@routes.post("/plugin/{id}/reload")
|
@routes.post("/plugin/{id}/reload")
|
||||||
async def reload_plugin(request: web.Request) -> web.Response:
|
async def reload_plugin(request: web.Request) -> web.Response:
|
||||||
plugin_id = request.match_info.get("id", None)
|
plugin_id = request.match_info["id"]
|
||||||
plugin = PluginLoader.id_cache.get(plugin_id, None)
|
plugin = PluginLoader.id_cache.get(plugin_id)
|
||||||
if not plugin:
|
if not plugin:
|
||||||
return resp.plugin_not_found
|
return resp.plugin_not_found
|
||||||
|
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -15,27 +15,39 @@
|
|||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
from io import BytesIO
|
from io import BytesIO
|
||||||
from time import time
|
from time import time
|
||||||
import traceback
|
import logging
|
||||||
import os.path
|
import os.path
|
||||||
import re
|
import re
|
||||||
|
import traceback
|
||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
from packaging.version import Version
|
from packaging.version import Version
|
||||||
|
|
||||||
from ...loader import PluginLoader, ZippedPluginLoader, MaubotZipImportError
|
from ...loader import DatabaseType, MaubotZipImportError, PluginLoader, ZippedPluginLoader
|
||||||
|
from .base import get_config, routes
|
||||||
from .responses import resp
|
from .responses import resp
|
||||||
from .base import routes, get_config
|
|
||||||
|
try:
|
||||||
|
import sqlalchemy
|
||||||
|
|
||||||
|
has_alchemy = True
|
||||||
|
except ImportError:
|
||||||
|
has_alchemy = False
|
||||||
|
|
||||||
|
log = logging.getLogger("maubot.server.upload")
|
||||||
|
|
||||||
|
|
||||||
@routes.put("/plugin/{id}")
|
@routes.put("/plugin/{id}")
|
||||||
async def put_plugin(request: web.Request) -> web.Response:
|
async def put_plugin(request: web.Request) -> web.Response:
|
||||||
plugin_id = request.match_info.get("id", None)
|
plugin_id = request.match_info["id"]
|
||||||
content = await request.read()
|
content = await request.read()
|
||||||
file = BytesIO(content)
|
file = BytesIO(content)
|
||||||
try:
|
try:
|
||||||
pid, version = ZippedPluginLoader.verify_meta(file)
|
pid, version, db_type = ZippedPluginLoader.verify_meta(file)
|
||||||
except MaubotZipImportError as e:
|
except MaubotZipImportError as e:
|
||||||
return resp.plugin_import_error(str(e), traceback.format_exc())
|
return resp.plugin_import_error(str(e), traceback.format_exc())
|
||||||
|
if db_type == DatabaseType.SQLALCHEMY and not has_alchemy:
|
||||||
|
return resp.sqlalchemy_not_installed
|
||||||
if pid != plugin_id:
|
if pid != plugin_id:
|
||||||
return resp.pid_mismatch
|
return resp.pid_mismatch
|
||||||
plugin = PluginLoader.id_cache.get(plugin_id, None)
|
plugin = PluginLoader.id_cache.get(plugin_id, None)
|
||||||
@ -52,9 +64,11 @@ async def upload_plugin(request: web.Request) -> web.Response:
|
|||||||
content = await request.read()
|
content = await request.read()
|
||||||
file = BytesIO(content)
|
file = BytesIO(content)
|
||||||
try:
|
try:
|
||||||
pid, version = ZippedPluginLoader.verify_meta(file)
|
pid, version, db_type = ZippedPluginLoader.verify_meta(file)
|
||||||
except MaubotZipImportError as e:
|
except MaubotZipImportError as e:
|
||||||
return resp.plugin_import_error(str(e), traceback.format_exc())
|
return resp.plugin_import_error(str(e), traceback.format_exc())
|
||||||
|
if db_type == DatabaseType.SQLALCHEMY and not has_alchemy:
|
||||||
|
return resp.sqlalchemy_not_installed
|
||||||
plugin = PluginLoader.id_cache.get(pid, None)
|
plugin = PluginLoader.id_cache.get(pid, None)
|
||||||
if not plugin:
|
if not plugin:
|
||||||
return await upload_new_plugin(content, pid, version)
|
return await upload_new_plugin(content, pid, version)
|
||||||
@ -78,15 +92,20 @@ async def upload_new_plugin(content: bytes, pid: str, version: Version) -> web.R
|
|||||||
return resp.created(plugin.to_dict())
|
return resp.created(plugin.to_dict())
|
||||||
|
|
||||||
|
|
||||||
async def upload_replacement_plugin(plugin: ZippedPluginLoader, content: bytes,
|
async def upload_replacement_plugin(
|
||||||
new_version: Version) -> web.Response:
|
plugin: ZippedPluginLoader, content: bytes, new_version: Version
|
||||||
|
) -> web.Response:
|
||||||
dirname = os.path.dirname(plugin.path)
|
dirname = os.path.dirname(plugin.path)
|
||||||
old_filename = os.path.basename(plugin.path)
|
old_filename = os.path.basename(plugin.path)
|
||||||
if str(plugin.meta.version) in old_filename:
|
if str(plugin.meta.version) in old_filename:
|
||||||
replacement = (str(new_version) if plugin.meta.version != new_version
|
replacement = (
|
||||||
else f"{new_version}-ts{int(time())}")
|
str(new_version)
|
||||||
filename = re.sub(f"{re.escape(str(plugin.meta.version))}(-ts[0-9]+)?",
|
if plugin.meta.version != new_version
|
||||||
replacement, old_filename)
|
else f"{new_version}-ts{int(time() * 1000)}"
|
||||||
|
)
|
||||||
|
filename = re.sub(
|
||||||
|
f"{re.escape(str(plugin.meta.version))}(-ts[0-9]+)?", replacement, old_filename
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
filename = old_filename.rstrip(".mbp")
|
filename = old_filename.rstrip(".mbp")
|
||||||
filename = f"{filename}-v{new_version}.mbp"
|
filename = f"{filename}-v{new_version}.mbp"
|
||||||
@ -98,12 +117,29 @@ async def upload_replacement_plugin(plugin: ZippedPluginLoader, content: bytes,
|
|||||||
try:
|
try:
|
||||||
await plugin.reload(new_path=path)
|
await plugin.reload(new_path=path)
|
||||||
except MaubotZipImportError as e:
|
except MaubotZipImportError as e:
|
||||||
|
log.exception(f"Error loading updated version of {plugin.meta.id}, rolling back")
|
||||||
try:
|
try:
|
||||||
await plugin.reload(new_path=old_path)
|
await plugin.reload(new_path=old_path)
|
||||||
await plugin.start_instances()
|
await plugin.start_instances()
|
||||||
except MaubotZipImportError:
|
except MaubotZipImportError:
|
||||||
pass
|
log.warning(f"Failed to roll back update of {plugin.meta.id}", exc_info=True)
|
||||||
|
finally:
|
||||||
|
ZippedPluginLoader.trash(path, reason="failed_update")
|
||||||
return resp.plugin_import_error(str(e), traceback.format_exc())
|
return resp.plugin_import_error(str(e), traceback.format_exc())
|
||||||
|
try:
|
||||||
await plugin.start_instances()
|
await plugin.start_instances()
|
||||||
|
except Exception as e:
|
||||||
|
log.exception(f"Error starting {plugin.meta.id} instances after update, rolling back")
|
||||||
|
try:
|
||||||
|
await plugin.stop_instances()
|
||||||
|
await plugin.reload(new_path=old_path)
|
||||||
|
await plugin.start_instances()
|
||||||
|
except Exception:
|
||||||
|
log.warning(f"Failed to roll back update of {plugin.meta.id}", exc_info=True)
|
||||||
|
finally:
|
||||||
|
ZippedPluginLoader.trash(path, reason="failed_update")
|
||||||
|
return resp.plugin_reload_error(str(e), traceback.format_exc())
|
||||||
|
|
||||||
|
log.debug(f"Successfully updated {plugin.meta.id}, moving old version to trash")
|
||||||
ZippedPluginLoader.trash(old_path, reason="update")
|
ZippedPluginLoader.trash(old_path, reason="update")
|
||||||
return resp.updated(plugin.to_dict())
|
return resp.updated(plugin.to_dict())
|
||||||
|
@ -1,5 +1,5 @@
|
|||||||
# maubot - A plugin-based Matrix bot system.
|
# maubot - A plugin-based Matrix bot system.
|
||||||
# Copyright (C) 2019 Tulir Asokan
|
# Copyright (C) 2022 Tulir Asokan
|
||||||
#
|
#
|
||||||
# This program is free software: you can redistribute it and/or modify
|
# This program is free software: you can redistribute it and/or modify
|
||||||
# it under the terms of the GNU Affero General Public License as published by
|
# it under the terms of the GNU Affero General Public License as published by
|
||||||
@ -13,271 +13,457 @@
|
|||||||
#
|
#
|
||||||
# You should have received a copy of the GNU Affero General Public License
|
# You should have received a copy of the GNU Affero General Public License
|
||||||
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
# along with this program. If not, see <https://www.gnu.org/licenses/>.
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
from typing import TYPE_CHECKING
|
||||||
from http import HTTPStatus
|
from http import HTTPStatus
|
||||||
|
|
||||||
from aiohttp import web
|
from aiohttp import web
|
||||||
from sqlalchemy.exc import OperationalError, IntegrityError
|
from asyncpg import PostgresError
|
||||||
|
import aiosqlite
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from sqlalchemy.exc import IntegrityError, OperationalError
|
||||||
|
|
||||||
|
|
||||||
class _Response:
|
class _Response:
|
||||||
@property
|
@property
|
||||||
def body_not_json(self) -> web.Response:
|
def body_not_json(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Request body is not JSON",
|
"error": "Request body is not JSON",
|
||||||
"errcode": "body_not_json",
|
"errcode": "body_not_json",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def plugin_type_required(self) -> web.Response:
|
def plugin_type_required(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Plugin type is required when creating plugin instances",
|
"error": "Plugin type is required when creating plugin instances",
|
||||||
"errcode": "plugin_type_required",
|
"errcode": "plugin_type_required",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def primary_user_required(self) -> web.Response:
|
def primary_user_required(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Primary user is required when creating plugin instances",
|
"error": "Primary user is required when creating plugin instances",
|
||||||
"errcode": "primary_user_required",
|
"errcode": "primary_user_required",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def bad_client_access_token(self) -> web.Response:
|
def bad_client_access_token(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Invalid access token",
|
"error": "Invalid access token",
|
||||||
"errcode": "bad_client_access_token",
|
"errcode": "bad_client_access_token",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def bad_client_access_details(self) -> web.Response:
|
def bad_client_access_details(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Invalid homeserver or access token",
|
"error": "Invalid homeserver or access token",
|
||||||
"errcode": "bad_client_access_details"
|
"errcode": "bad_client_access_details",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def bad_client_connection_details(self) -> web.Response:
|
def bad_client_connection_details(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Could not connect to homeserver",
|
"error": "Could not connect to homeserver",
|
||||||
"errcode": "bad_client_connection_details"
|
"errcode": "bad_client_connection_details",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
def mxid_mismatch(self, found: str) -> web.Response:
|
def mxid_mismatch(self, found: str) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
"error": "The Matrix user ID of the client and the user ID of the access token don't "
|
{
|
||||||
f"match. Access token is for user {found}",
|
"error": (
|
||||||
|
"The Matrix user ID of the client and the user ID of the access token don't "
|
||||||
|
f"match. Access token is for user {found}"
|
||||||
|
),
|
||||||
"errcode": "mxid_mismatch",
|
"errcode": "mxid_mismatch",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
def device_id_mismatch(self, found: str) -> web.Response:
|
||||||
|
return web.json_response(
|
||||||
|
{
|
||||||
|
"error": (
|
||||||
|
"The Matrix device ID of the client and the device ID of the access token "
|
||||||
|
f"don't match. Access token is for device {found}"
|
||||||
|
),
|
||||||
|
"errcode": "mxid_mismatch",
|
||||||
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def pid_mismatch(self) -> web.Response:
|
def pid_mismatch(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "The ID in the path does not match the ID of the uploaded plugin",
|
"error": "The ID in the path does not match the ID of the uploaded plugin",
|
||||||
"errcode": "pid_mismatch",
|
"errcode": "pid_mismatch",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def username_or_password_missing(self) -> web.Response:
|
def username_or_password_missing(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Username or password missing",
|
"error": "Username or password missing",
|
||||||
"errcode": "username_or_password_missing",
|
"errcode": "username_or_password_missing",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def query_missing(self) -> web.Response:
|
def query_missing(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Query missing",
|
"error": "Query missing",
|
||||||
"errcode": "query_missing",
|
"errcode": "query_missing",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def sql_error(error: PostgresError | aiosqlite.Error, query: str) -> web.Response:
|
||||||
|
return web.json_response(
|
||||||
|
{
|
||||||
|
"ok": False,
|
||||||
|
"query": query,
|
||||||
|
"error": str(error),
|
||||||
|
"errcode": "sql_error",
|
||||||
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def sql_operational_error(error: OperationalError, query: str) -> web.Response:
|
def sql_operational_error(error: OperationalError, query: str) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"ok": False,
|
"ok": False,
|
||||||
"query": query,
|
"query": query,
|
||||||
"error": str(error.orig),
|
"error": str(error.orig),
|
||||||
"full_error": str(error),
|
"full_error": str(error),
|
||||||
"errcode": "sql_operational_error",
|
"errcode": "sql_operational_error",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def sql_integrity_error(error: IntegrityError, query: str) -> web.Response:
|
def sql_integrity_error(error: IntegrityError, query: str) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"ok": False,
|
"ok": False,
|
||||||
"query": query,
|
"query": query,
|
||||||
"error": str(error.orig),
|
"error": str(error.orig),
|
||||||
"full_error": str(error),
|
"full_error": str(error),
|
||||||
"errcode": "sql_integrity_error",
|
"errcode": "sql_integrity_error",
|
||||||
}, status=HTTPStatus.BAD_REQUEST)
|
},
|
||||||
|
status=HTTPStatus.BAD_REQUEST,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def bad_auth(self) -> web.Response:
|
def bad_auth(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Invalid username or password",
|
"error": "Invalid username or password",
|
||||||
"errcode": "invalid_auth",
|
"errcode": "invalid_auth",
|
||||||
}, status=HTTPStatus.UNAUTHORIZED)
|
},
|
||||||
|
status=HTTPStatus.UNAUTHORIZED,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def no_token(self) -> web.Response:
|
def no_token(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Authorization token missing",
|
"error": "Authorization token missing",
|
||||||
"errcode": "auth_token_missing",
|
"errcode": "auth_token_missing",
|
||||||
}, status=HTTPStatus.UNAUTHORIZED)
|
},
|
||||||
|
status=HTTPStatus.UNAUTHORIZED,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def invalid_token(self) -> web.Response:
|
def invalid_token(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Invalid authorization token",
|
"error": "Invalid authorization token",
|
||||||
"errcode": "auth_token_invalid",
|
"errcode": "auth_token_invalid",
|
||||||
}, status=HTTPStatus.UNAUTHORIZED)
|
},
|
||||||
|
status=HTTPStatus.UNAUTHORIZED,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def plugin_not_found(self) -> web.Response:
|
def plugin_not_found(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Plugin not found",
|
"error": "Plugin not found",
|
||||||
"errcode": "plugin_not_found",
|
"errcode": "plugin_not_found",
|
||||||
}, status=HTTPStatus.NOT_FOUND)
|
},
|
||||||
|
status=HTTPStatus.NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def client_not_found(self) -> web.Response:
|
def client_not_found(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Client not found",
|
"error": "Client not found",
|
||||||
"errcode": "client_not_found",
|
"errcode": "client_not_found",
|
||||||
}, status=HTTPStatus.NOT_FOUND)
|
},
|
||||||
|
status=HTTPStatus.NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def primary_user_not_found(self) -> web.Response:
|
def primary_user_not_found(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Client for given primary user not found",
|
"error": "Client for given primary user not found",
|
||||||
"errcode": "primary_user_not_found",
|
"errcode": "primary_user_not_found",
|
||||||
}, status=HTTPStatus.NOT_FOUND)
|
},
|
||||||
|
status=HTTPStatus.NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def instance_not_found(self) -> web.Response:
|
def instance_not_found(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Plugin instance not found",
|
"error": "Plugin instance not found",
|
||||||
"errcode": "instance_not_found",
|
"errcode": "instance_not_found",
|
||||||
}, status=HTTPStatus.NOT_FOUND)
|
},
|
||||||
|
status=HTTPStatus.NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def plugin_type_not_found(self) -> web.Response:
|
def plugin_type_not_found(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Given plugin type not found",
|
"error": "Given plugin type not found",
|
||||||
"errcode": "plugin_type_not_found",
|
"errcode": "plugin_type_not_found",
|
||||||
}, status=HTTPStatus.NOT_FOUND)
|
},
|
||||||
|
status=HTTPStatus.NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def path_not_found(self) -> web.Response:
|
def path_not_found(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Resource not found",
|
"error": "Resource not found",
|
||||||
"errcode": "resource_not_found",
|
"errcode": "resource_not_found",
|
||||||
}, status=HTTPStatus.NOT_FOUND)
|
},
|
||||||
|
status=HTTPStatus.NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def server_not_found(self) -> web.Response:
|
def server_not_found(self) -> web.Response:
|
||||||
return web.json_response({
|
return web.json_response(
|
||||||
|
{
|
||||||
"error": "Registration target server not found",
|
"error": "Registration target server not found",
|
||||||
"errcode": "server_not_found",
|
"errcode": "server_not_found",
|
||||||
}, status=HTTPStatus.NOT_FOUND)
|
},
|
||||||
|
status=HTTPStatus.NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def registration_secret_not_found(self) -> web.Response:
|
||||||
|
return web.json_response(
|
||||||
|
{
|
||||||
|
"error": "Config does not have a registration secret for that server",
|
||||||
|
"errcode": "registration_secret_not_found",
|
||||||
|
},
|
||||||
|
status=HTTPStatus.NOT_FOUND,
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def registration_no_sso(self) -> web.Response:
|
||||||
|
return web.json_response(
|
||||||
|
{
|
||||||
|
"error": "The register operation is only for registering with a password",
|
||||||
|
"errcode": "registration_no_sso",
|
||||||
|
},
|
||||||
+            status=HTTPStatus.BAD_REQUEST,
+        )
+
+    @property
+    def sso_not_supported(self) -> web.Response:
+        return web.json_response(
+            {
+                "error": "That server does not seem to support single sign-on",
+                "errcode": "sso_not_supported",
+            },
+            status=HTTPStatus.FORBIDDEN,
+        )
+
     @property
     def plugin_has_no_database(self) -> web.Response:
-        return web.json_response({
-            "error": "Given plugin does not have a database",
-            "errcode": "plugin_has_no_database",
-        })
+        return web.json_response(
+            {
+                "error": "Given plugin does not have a database",
+                "errcode": "plugin_has_no_database",
+            }
+        )
+
+    @property
+    def unsupported_plugin_database(self) -> web.Response:
+        return web.json_response(
+            {
+                "error": "The database type is not supported by this API",
+                "errcode": "unsupported_plugin_database",
+            }
+        )
+
+    @property
+    def sqlalchemy_not_installed(self) -> web.Response:
+        return web.json_response(
+            {
+                "error": "This plugin requires a legacy database, but SQLAlchemy is not installed",
+                "errcode": "unsupported_plugin_database",
+            },
+            status=HTTPStatus.NOT_IMPLEMENTED,
+        )

     @property
     def table_not_found(self) -> web.Response:
-        return web.json_response({
-            "error": "Given table not found in plugin database",
-            "errcode": "table_not_found",
-        })
+        return web.json_response(
+            {
+                "error": "Given table not found in plugin database",
+                "errcode": "table_not_found",
+            }
+        )

     @property
     def method_not_allowed(self) -> web.Response:
-        return web.json_response({
-            "error": "Method not allowed",
-            "errcode": "method_not_allowed",
-        }, status=HTTPStatus.METHOD_NOT_ALLOWED)
+        return web.json_response(
+            {
+                "error": "Method not allowed",
+                "errcode": "method_not_allowed",
+            },
+            status=HTTPStatus.METHOD_NOT_ALLOWED,
+        )

     @property
     def user_exists(self) -> web.Response:
-        return web.json_response({
-            "error": "There is already a client with the user ID of that token",
-            "errcode": "user_exists",
-        }, status=HTTPStatus.CONFLICT)
+        return web.json_response(
+            {
+                "error": "There is already a client with the user ID of that token",
+                "errcode": "user_exists",
+            },
+            status=HTTPStatus.CONFLICT,
+        )

     @property
     def plugin_exists(self) -> web.Response:
-        return web.json_response({
-            "error": "A plugin with the same ID as the uploaded plugin already exists",
-            "errcode": "plugin_exists"
-        }, status=HTTPStatus.CONFLICT)
+        return web.json_response(
+            {
+                "error": "A plugin with the same ID as the uploaded plugin already exists",
+                "errcode": "plugin_exists",
+            },
+            status=HTTPStatus.CONFLICT,
+        )

     @property
     def plugin_in_use(self) -> web.Response:
-        return web.json_response({
-            "error": "Plugin instances of this type still exist",
-            "errcode": "plugin_in_use",
-        }, status=HTTPStatus.PRECONDITION_FAILED)
+        return web.json_response(
+            {
+                "error": "Plugin instances of this type still exist",
+                "errcode": "plugin_in_use",
+            },
+            status=HTTPStatus.PRECONDITION_FAILED,
+        )

     @property
     def client_in_use(self) -> web.Response:
-        return web.json_response({
-            "error": "Plugin instances with this client as their primary user still exist",
-            "errcode": "client_in_use",
-        }, status=HTTPStatus.PRECONDITION_FAILED)
+        return web.json_response(
+            {
+                "error": "Plugin instances with this client as their primary user still exist",
+                "errcode": "client_in_use",
+            },
+            status=HTTPStatus.PRECONDITION_FAILED,
+        )

     @staticmethod
     def plugin_import_error(error: str, stacktrace: str) -> web.Response:
-        return web.json_response({
-            "error": error,
-            "stacktrace": stacktrace,
-            "errcode": "plugin_invalid",
-        }, status=HTTPStatus.BAD_REQUEST)
+        return web.json_response(
+            {
+                "error": error,
+                "stacktrace": stacktrace,
+                "errcode": "plugin_invalid",
+            },
+            status=HTTPStatus.BAD_REQUEST,
+        )

     @staticmethod
     def plugin_reload_error(error: str, stacktrace: str) -> web.Response:
-        return web.json_response({
-            "error": error,
-            "stacktrace": stacktrace,
-            "errcode": "plugin_reload_fail",
-        }, status=HTTPStatus.INTERNAL_SERVER_ERROR)
+        return web.json_response(
+            {
+                "error": error,
+                "stacktrace": stacktrace,
+                "errcode": "plugin_reload_fail",
+            },
+            status=HTTPStatus.INTERNAL_SERVER_ERROR,
+        )

     @property
     def internal_server_error(self) -> web.Response:
-        return web.json_response({
-            "error": "Internal server error",
-            "errcode": "internal_server_error",
-        }, status=HTTPStatus.INTERNAL_SERVER_ERROR)
+        return web.json_response(
+            {
+                "error": "Internal server error",
+                "errcode": "internal_server_error",
+            },
+            status=HTTPStatus.INTERNAL_SERVER_ERROR,
+        )

     @property
     def invalid_server(self) -> web.Response:
-        return web.json_response({
-            "error": "Invalid registration server object in maubot configuration",
-            "errcode": "invalid_server",
-        }, status=HTTPStatus.INTERNAL_SERVER_ERROR)
+        return web.json_response(
+            {
+                "error": "Invalid registration server object in maubot configuration",
+                "errcode": "invalid_server",
+            },
+            status=HTTPStatus.INTERNAL_SERVER_ERROR,
+        )

     @property
     def unsupported_plugin_loader(self) -> web.Response:
-        return web.json_response({
-            "error": "Existing plugin with same ID uses unsupported plugin loader",
-            "errcode": "unsupported_plugin_loader",
-        }, status=HTTPStatus.BAD_REQUEST)
+        return web.json_response(
+            {
+                "error": "Existing plugin with same ID uses unsupported plugin loader",
+                "errcode": "unsupported_plugin_loader",
+            },
+            status=HTTPStatus.BAD_REQUEST,
+        )

     @property
     def not_implemented(self) -> web.Response:
-        return web.json_response({
-            "error": "Not implemented",
-            "errcode": "not_implemented",
-        }, status=HTTPStatus.NOT_IMPLEMENTED)
+        return web.json_response(
+            {
+                "error": "Not implemented",
+                "errcode": "not_implemented",
+            },
+            status=HTTPStatus.NOT_IMPLEMENTED,
+        )

     @property
     def ok(self) -> web.Response:
-        return web.json_response({
-            "success": True,
-        }, status=HTTPStatus.OK)
+        return web.json_response(
+            {"success": True},
+            status=HTTPStatus.OK,
+        )

     @property
     def deleted(self) -> web.Response:
@@ -287,19 +473,15 @@ class _Response:
     def found(data: dict) -> web.Response:
         return web.json_response(data, status=HTTPStatus.OK)

-    def updated(self, data: dict) -> web.Response:
-        return self.found(data)
+    @staticmethod
+    def updated(data: dict, is_login: bool = False) -> web.Response:
+        return web.json_response(data, status=HTTPStatus.ACCEPTED if is_login else HTTPStatus.OK)

     def logged_in(self, token: str) -> web.Response:
-        return self.found({
-            "token": token,
-        })
+        return self.found({"token": token})

     def pong(self, user: str, features: dict) -> web.Response:
-        return self.found({
-            "username": user,
-            "features": features,
-        })
+        return self.found({"username": user, "features": features})

     @staticmethod
     def created(data: dict) -> web.Response:
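The canned responses above are returned directly from the management API's aiohttp handlers. A minimal, self-contained sketch of that pattern follows; the handler, route path, `resp` instance name, and the `loaded_plugins` registry are illustrative assumptions, not part of this diff:

```python
from http import HTTPStatus

from aiohttp import web


class _Response:
    """Stand-in holding two of the canned responses shown in the diff above."""

    @property
    def plugin_in_use(self) -> web.Response:
        return web.json_response(
            {
                "error": "Plugin instances of this type still exist",
                "errcode": "plugin_in_use",
            },
            status=HTTPStatus.PRECONDITION_FAILED,
        )

    @property
    def ok(self) -> web.Response:
        return web.json_response({"success": True}, status=HTTPStatus.OK)


resp = _Response()
loaded_plugins: dict = {}  # hypothetical registry: plugin ID -> list of instances


async def delete_plugin(request: web.Request) -> web.Response:
    plugin_id = request.match_info["id"]
    if loaded_plugins.get(plugin_id):
        # Refuse to delete a plugin type that still has instances.
        return resp.plugin_in_use
    loaded_plugins.pop(plugin_id, None)
    return resp.ok


app = web.Application()
app.add_routes([web.delete("/_matrix/maubot/v1/plugin/{id}", delete_plugin)])

if __name__ == "__main__":
    web.run_app(app, port=8080)  # arbitrary port for this sketch
```

Keeping the responses as properties on a shared object keeps handler bodies down to a lookup and a `return`.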
@@ -1,93 +1,2 @@
 # Maubot Management API
-Most of the API is simple HTTP+JSON and has OpenAPI documentation (see
+This document has been moved to docs.mau.fi: <https://docs.mau.fi/maubot/management-api.html>
-[spec.yaml](spec.yaml), [rendered](https://maubot.xyz/spec/)). However,
-some parts of the API aren't documented in the OpenAPI document.
-
-## Matrix API proxy
-The full Matrix API can be accessed for each client with a request to
-`/_matrix/maubot/v1/proxy/<user>/<path>`. `<user>` is the Matrix user
-ID of the user to access the API as and `<path>` is the whole API
-path to access (e.g. `/_matrix/client/r0/whoami`).
-
-The body, headers, query parameters, etc are sent to the Matrix server
-as-is, with a few exceptions:
-* The `Authorization` header will be replaced with the access token
-  for the Matrix user from the maubot database.
-* The `access_token` query parameter will be removed.
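For reference, a proxied call could look like the sketch below. The base URL, token, and user ID are placeholder assumptions (29316 is maubot's usual default port, but nothing in this diff sets it):

```python
import asyncio

import aiohttp

MAUBOT_URL = "http://localhost:29316"          # assumed local maubot instance
MANAGEMENT_TOKEN = "<management login token>"  # placeholder
USER_ID = "@bot:example.com"                   # placeholder Matrix user ID


async def whoami() -> dict:
    # /_matrix/client/r0/whoami is called as USER_ID through the maubot proxy.
    # The Authorization header carries the management token; maubot swaps it
    # for the Matrix access token it has stored for that user.
    url = f"{MAUBOT_URL}/_matrix/maubot/v1/proxy/{USER_ID}/_matrix/client/r0/whoami"
    headers = {"Authorization": f"Bearer {MANAGEMENT_TOKEN}"}
    async with aiohttp.ClientSession() as session:
        async with session.get(url, headers=headers) as resp:
            return await resp.json()


if __name__ == "__main__":
    print(asyncio.run(whoami()))
```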
-
-## Log viewing
-1. Open websocket to `/_matrix/maubot/v1/logs`.
-2. Send authentication token as a plain string.
-3. Server will respond with `{"auth_success": true}` and then with
-   `{"history": ...}` where `...` is a list of log entries.
-4. Server will send new log entries as JSON.
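A sketch of a consumer following that handshake, assuming aiohttp on the client side; the URL and token are placeholders:

```python
import asyncio
import json

import aiohttp

LOGS_URL = "ws://localhost:29316/_matrix/maubot/v1/logs"  # assumed local instance
MANAGEMENT_TOKEN = "<management login token>"             # placeholder


async def tail_logs() -> None:
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect(LOGS_URL) as ws:
            await ws.send_str(MANAGEMENT_TOKEN)  # step 2: token as a plain string
            async for msg in ws:
                if msg.type != aiohttp.WSMsgType.TEXT:
                    break
                data = json.loads(msg.data)
                if data.get("auth_success"):
                    continue  # step 3: auth acknowledgement
                for entry in data.get("history", []):
                    print(entry["time"], entry["msg"])  # step 3: backlog
                if "msg" in data:
                    print(data["time"], data["msg"])  # step 4: live entries


if __name__ == "__main__":
    asyncio.run(tail_logs())
```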
-
-### Log entry object format
-Log entries are a JSON-serialized form of Python log records.
-
-Log entries will always have:
-* `id` - A string that should uniquely identify the row. Currently
-  uses the `relativeCreated` field of Python logging records.
-* `msg` - The text in the entry.
-* `time` - The ISO date when the log entry was created.
-
-Log entries should also always have:
-* `levelname` - The log level (e.g. `DEBUG` or `ERROR`).
-* `levelno` - The integer log level.
-* `name` - The name of the logger. Common values:
-  * `maubot.client.<mxid>` - Client loggers (Matrix HTTP requests)
-  * `maubot.instance.<id>` - Plugin instance loggers
-  * `maubot.loader.zip` - The zip plugin loader (plugins don't
-    have their own logs)
-* `module` - The Python module name where the log call happened.
-* `pathname` - The full path of the file where the log call happened.
-* `filename` - The file name portion of `pathname`
-* `lineno` - The line in code where the log call happened.
-* `funcName` - The name of the function where the log call happened.
-
-Log entries might have:
-* `exc_info` - The formatted exception info if an exception was logged.
-* `matrix_http_request` - The info about a Matrix HTTP request. Subfields:
-  * `method` - The HTTP method used.
-  * `path` - The API path used.
-  * `content` - The content sent.
-  * `user` - The appservice user who the request was ran as.
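An illustrative entry with the fields listed above, written as a Python dict; all values are invented:

```python
# Hypothetical log entry as it might arrive over the websocket.
example_entry = {
    "id": "31951.271",
    "msg": "Starting instance of xyz.maubot.echo",
    "time": "2022-01-01T12:00:00.000000+00:00",
    "levelname": "DEBUG",
    "levelno": 10,
    "name": "maubot.instance.echobot",
    "module": "instance",
    "pathname": "/opt/maubot/maubot/instance.py",
    "filename": "instance.py",
    "lineno": 123,
    "funcName": "start",
}
```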
-
-## Debug file open
-For debug and development purposes, the API and frontend support
-clicking on lines in stack traces to open that line in the selected
-editor.
-
-### Configuration
-First, the directory where maubot is run from must have a
-`.dev-open-cfg.yaml` file. The file should contain the following
-fields:
-* `editor` - The command to run to open a file.
-  * `$path` is replaced with the full file path.
-  * `$line` is replaced with the line number.
-* `pathmap` - A list of find-and-replaces to execute on paths.
-  These are needed to map things like `.mbp` files to the extracted
-  sources on disk. Each pathmap entry should have:
-  * `find` - The regex to match.
-  * `replace` - The replacement. May insert capture groups with Python
-    syntax (e.g. `\1`)
-
-Example file:
-```yaml
-editor: pycharm --line $line $path
-pathmap:
-- find: "maubot/plugins/xyz\\.maubot\\.(.+)-v.+(?:-ts[0-9]+)?.mbp"
-  replace: "mbplugins/\\1"
-- find: "maubot/.venv/lib/python3.6/site-packages/mautrix"
-  replace: "mautrix-python/mautrix"
-```
-
-### API
-Clients can `GET /_matrix/maubot/v1/debug/open` to check if the file
-open endpoint has been set up. The response is a JSON object with a
-single field `enabled`. If the value is true, the endpoint can be used.
-
-To open files, clients can `POST /_matrix/maubot/v1/debug/open` with
-a JSON body containing
-* `path` - The full file path to open
-* `line` - The line number to open
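A sketch of how a client could drive those two endpoints, assuming a local instance and a placeholder token:

```python
import asyncio

import aiohttp

BASE = "http://localhost:29316/_matrix/maubot/v1"               # assumed local instance
HEADERS = {"Authorization": "Bearer <management login token>"}  # placeholder


async def open_in_editor(path: str, line: int) -> None:
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        # Check whether a .dev-open-cfg.yaml was found and the feature is enabled.
        async with session.get(f"{BASE}/debug/open") as resp:
            if not (await resp.json()).get("enabled"):
                print("Debug file open is not configured")
                return
        # Ask the server to open the given file and line in the configured editor.
        await session.post(f"{BASE}/debug/open", json={"path": path, "line": line})


if __name__ == "__main__":
    asyncio.run(open_in_editor("/opt/maubot/maubot/instance.py", 123))
```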
@@ -366,7 +366,7 @@ paths:
           schema:
             $ref: '#/components/schemas/MatrixClient'
       responses:
-        200:
+        202:
           description: Client updated
           content:
             application/json:
@@ -454,6 +454,12 @@ paths:
         required: true
         schema:
           type: string
+      - name: update_client
+        in: query
+        description: Should maubot store the access details in a Client instead of returning them?
+        required: false
+        schema:
+          type: boolean
     post:
       operationId: client_auth_register
       summary: |
@@ -475,18 +481,29 @@ paths:
                 properties:
                   access_token:
                     type: string
-                    example: token_here
+                    example: syt_123_456_789
                   user_id:
                     type: string
                     example: '@putkiteippi:maunium.net'
-                  home_server:
-                    type: string
-                    example: maunium.net
                   device_id:
                     type: string
-                    example: device_id_here
+                    example: maubot_F00BAR12
+        201:
+          description: Client created (when update_client is true)
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/MatrixClient'
         401:
           $ref: '#/components/responses/Unauthorized'
+        409:
+          description: |
+            There is already a client with the user ID of that token.
+            This should usually not happen, because the user ID was just created.
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/Error'
         500:
           $ref: '#/components/responses/MatrixServerError'
   '/client/auth/{server}/login':
@@ -497,6 +514,12 @@ paths:
         required: true
         schema:
           type: string
+      - name: update_client
+        in: query
+        description: Should maubot store the access details in a Client instead of returning them?
+        required: false
+        schema:
+          type: boolean
     post:
       operationId: client_auth_login
       summary: Log in to the given Matrix server via the maubot server
@@ -519,10 +542,22 @@ paths:
                     example: '@putkiteippi:maunium.net'
                   access_token:
                     type: string
-                    example: token_here
+                    example: syt_123_456_789
                   device_id:
                     type: string
-                    example: device_id_here
+                    example: maubot_F00BAR12
+        201:
+          description: Client created (when update_client is true)
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/MatrixClient'
+        202:
+          description: Client updated (when update_client is true)
+          content:
+            application/json:
+              schema:
+                $ref: '#/components/schemas/MatrixClient'
         401:
           $ref: '#/components/responses/Unauthorized'
         500:
@@ -641,6 +676,12 @@ components:
         access_token:
          type: string
          description: The Matrix access token for this client.
+        device_id:
+          type: string
+          description: The Matrix device ID corresponding to the access token.
+        fingerprint:
+          type: string
+          description: The encryption device fingerprint for verification.
         enabled:
           type: boolean
           example: true
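Taken together, the new `update_client` query parameter and the 201/202 responses let a caller have maubot create or update the Client record directly instead of only returning the access details. A hedged sketch of that flow; the base URL, token, and the username/password body shape are assumptions, since the login request body is not shown in this excerpt:

```python
import asyncio

import aiohttp

BASE = "http://localhost:29316/_matrix/maubot/v1"               # assumed local instance
HEADERS = {"Authorization": "Bearer <management login token>"}  # placeholder


async def login_and_store(server: str, username: str, password: str) -> None:
    async with aiohttp.ClientSession(headers=HEADERS) as session:
        async with session.post(
            f"{BASE}/client/auth/{server}/login",
            params={"update_client": "true"},  # ask maubot to store the result as a Client
            json={"username": username, "password": password},  # assumed body shape
        ) as resp:
            if resp.status == 201:
                print("Client created:", await resp.json())
            elif resp.status == 202:
                print("Existing client updated:", await resp.json())
            else:
                print("Login failed:", resp.status, await resp.text())


if __name__ == "__main__":
    asyncio.run(login_and_store("maunium.net", "putkiteippi", "hunter2"))
```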
@@ -1,6 +1,6 @@
 {
   "name": "maubot-manager",
-  "version": "0.1.0",
+  "version": "0.1.1",
   "private": true,
   "author": "Tulir Asokan",
   "license": "AGPL-3.0-or-later",
@@ -13,15 +13,15 @@
   },
   "homepage": ".",
   "dependencies": {
-    "node-sass": "^4.12.0",
-    "react": "^16.8.6",
-    "react-ace": "^8.0.0",
-    "react-contextmenu": "^2.11.0",
-    "react-dom": "^16.8.6",
-    "react-json-tree": "^0.11.2",
-    "react-router-dom": "^5.0.1",
-    "react-scripts": "3.3.0",
-    "react-select": "^3.0.4"
+    "react": "^17.0.2",
+    "react-ace": "^9.4.1",
+    "react-contextmenu": "^2.14.0",
+    "react-dom": "^17.0.2",
+    "react-json-tree": "^0.16.1",
+    "react-router-dom": "^5.3.0",
+    "react-scripts": "5.0.0",
+    "react-select": "^5.2.1",
+    "sass": "^1.34.1"
   },
   "scripts": {
     "start": "react-scripts start",
@@ -30,15 +30,11 @@
     "eject": "react-scripts eject"
   },
   "browserslist": [
-    "last 5 firefox versions",
-    "last 3 and_ff versions",
-    "last 5 chrome versions",
-    "last 3 and_chr versions",
-    "last 2 safari versions",
-    "last 2 ios_saf versions"
-  ],
-  "devDependencies": {
-    "sass-lint": "^1.13.1",
-    "sass-lint-auto-fix": "^0.17.0"
-  }
+    "last 2 firefox versions",
+    "last 2 and_ff versions",
+    "last 2 chrome versions",
+    "last 2 and_chr versions",
+    "last 1 safari versions",
+    "last 1 ios_saf versions"
+  ]
 }
@@ -1,6 +1,6 @@
 <!--
 maubot - A plugin-based Matrix bot system.
-Copyright (C) 2019 Tulir Asokan
+Copyright (C) 2022 Tulir Asokan

 This program is free software: you can redistribute it and/or modify
 it under the terms of the GNU Affero General Public License as published by
@@ -1,5 +1,5 @@
 // maubot - A plugin-based Matrix bot system.
-// Copyright (C) 2019 Tulir Asokan
+// Copyright (C) 2022 Tulir Asokan
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as published by
@@ -14,7 +14,11 @@
 // You should have received a copy of the GNU Affero General Public License
 // along with this program. If not, see <https://www.gnu.org/licenses/>.

-export const BASE_PATH = "/_matrix/maubot/v1"
+let BASE_PATH = "/_matrix/maubot/v1"
+
+export function setBasePath(basePath) {
+    BASE_PATH = basePath
+}

 function getHeaders(contentType = "application/json") {
     return {
@@ -210,10 +214,10 @@ export async function uploadAvatar(id, data, mime) {
 }

 export function getAvatarURL({ id, avatar_url }) {
-    avatar_url = avatar_url || ""
-    if (avatar_url.startsWith("mxc://")) {
-        avatar_url = avatar_url.substr("mxc://".length)
+    if (!avatar_url?.startsWith("mxc://")) {
+        return null
     }
+    avatar_url = avatar_url.substr("mxc://".length)
     return `${BASE_PATH}/proxy/${id}/_matrix/media/r0/download/${avatar_url}?access_token=${
         localStorage.accessToken}`
 }
@@ -240,9 +244,9 @@ export async function doClientAuth(server, type, username, password) {
     return await resp.json()
 }

+// eslint-disable-next-line import/no-anonymous-default-export
 export default {
-    BASE_PATH,
-    login, ping, getFeatures, remoteGetFeatures,
+    login, ping, setBasePath, getFeatures, remoteGetFeatures,
     openLogSocket,
     debugOpenFile, debugOpenFileEnabled, updateDebugOpenFileEnabled,
     getInstances, getInstance, putInstance, deleteInstance,
@@ -1,5 +1,5 @@
 // maubot - A plugin-based Matrix bot system.
-// Copyright (C) 2019 Tulir Asokan
+// Copyright (C) 2022 Tulir Asokan
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as published by
@@ -57,7 +57,9 @@ export const PrefSwitch = ({ rowName, active, origActive, fullWidth = false, ...
     </PrefRow>
 )

-export const PrefSelect = ({ rowName, value, origValue, fullWidth = false, creatable = false, ...args }) => (
+export const PrefSelect = ({
+    rowName, value, origValue, fullWidth = false, creatable = false, ...args
+}) => (
     <PrefRow name={rowName} fullWidth={fullWidth} labelFor={rowName}
              changed={origValue !== undefined && value.id !== origValue}>
         {creatable
@@ -1,5 +1,5 @@
 // maubot - A plugin-based Matrix bot system.
-// Copyright (C) 2019 Tulir Asokan
+// Copyright (C) 2022 Tulir Asokan
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as published by
@@ -1,5 +1,5 @@
 // maubot - A plugin-based Matrix bot system.
-// Copyright (C) 2019 Tulir Asokan
+// Copyright (C) 2022 Tulir Asokan
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as published by
@@ -23,11 +23,13 @@ class Switch extends Component {
         }
     }

-    componentWillReceiveProps(nextProps) {
-        this.setState({
-            active: nextProps.active,
-        })
+    componentDidUpdate(prevProps) {
+        if (prevProps.active !== this.props.active) {
+            this.setState({
+                active: this.props.active,
+            })
+        }
     }

     toggle = () => {
         if (this.props.onToggle) {
Binary file not shown.
Binary file not shown.
@@ -1,5 +1,5 @@
 // maubot - A plugin-based Matrix bot system.
-// Copyright (C) 2019 Tulir Asokan
+// Copyright (C) 2022 Tulir Asokan
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as published by
@@ -1,5 +1,5 @@
 // maubot - A plugin-based Matrix bot system.
-// Copyright (C) 2019 Tulir Asokan
+// Copyright (C) 2022 Tulir Asokan
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as published by
@@ -1,5 +1,5 @@
 // maubot - A plugin-based Matrix bot system.
-// Copyright (C) 2019 Tulir Asokan
+// Copyright (C) 2022 Tulir Asokan
 //
 // This program is free software: you can redistribute it and/or modify
 // it under the terms of the GNU Affero General Public License as published by
@@ -30,7 +30,8 @@ class Main extends Component {
         }
     }

-    async componentWillMount() {
+    async componentDidMount() {
+        await this.getBasePath()
         if (localStorage.accessToken) {
             await this.ping()
         } else {
@@ -39,6 +40,19 @@ class Main extends Component {
         this.setState({ pinged: true })
     }

+    async getBasePath() {
+        try {
+            const resp = await fetch(process.env.PUBLIC_URL + "/paths.json", {
+                headers: { "Content-Type": "application/json" },
+            })
+            const apiPaths = await resp.json()
+            api.setBasePath(apiPaths.api_path)
+        } catch (err) {
+            console.error("Failed to get API path:", err)
+        }
+    }
+
     async ping() {
         try {
             const username = await api.ping()
Some files were not shown because too many files have changed in this diff.