Compare commits
68 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6f04530700 | ||
|
|
caf67fdf2b | ||
|
|
034794d58d | ||
|
|
e53ce92c96 | ||
|
|
630ac5fd8c | ||
|
|
b269fa0fc7 | ||
|
|
208cc7192e | ||
|
|
80e9145a4f | ||
|
|
78d370d3f4 | ||
|
|
f279a14693 | ||
|
|
b54bf3c4d5 | ||
|
|
582abba793 | ||
|
|
94da42ffb9 | ||
|
|
08d3aef177 | ||
|
|
7671b61a6b | ||
|
|
47b308f9b7 | ||
|
|
1a5931c3df | ||
|
|
b3d771e063 | ||
|
|
134796aa9f | ||
|
|
1598f096e9 | ||
|
|
99ed6eface | ||
|
|
ce6bf7c548 | ||
|
|
5677ff798f | ||
|
|
e47004097a | ||
|
|
5e3a4f3446 | ||
|
|
8e61ee60d5 | ||
|
|
a426453d7f | ||
|
|
1ac9bd0e68 | ||
|
|
a83c305e51 | ||
|
|
7b171cf59a | ||
|
|
b237c71b99 | ||
|
|
2eff37684d | ||
|
|
836823a5cd | ||
|
|
e1d4df0b04 | ||
|
|
70bbe7f5ad | ||
|
|
6d796df097 | ||
|
|
6cd6b412fe | ||
|
|
042429a11d | ||
|
|
c440df631f | ||
|
|
3247ffc8ea | ||
|
|
ef17c280b1 | ||
|
|
d0cdfa97c7 | ||
|
|
f0bbcfd2c8 | ||
|
|
08b7c6ce33 | ||
|
|
719708dfd0 | ||
|
|
b82cb83318 | ||
|
|
d9f4adcb0e | ||
|
|
e5bc06a617 | ||
|
|
af49871801 | ||
|
|
7d1f5abc13 | ||
|
|
31a8ba24a0 | ||
|
|
9e1b58d033 | ||
|
|
1acc8cd78c | ||
|
|
3140af63de | ||
|
|
829ebf59f7 | ||
|
|
4ce145bac2 | ||
|
|
6ef229f3d0 | ||
|
|
19b4fd520a | ||
|
|
70146e0b70 | ||
|
|
a804368806 | ||
|
|
3ec42fffaa | ||
|
|
95727335a7 | ||
|
|
79f9a3a5c2 | ||
|
|
7daebc308b | ||
|
|
50017cff36 | ||
|
|
f812c9e666 | ||
|
|
87a35af693 | ||
|
|
4c4a397f66 |
@@ -1,7 +1,9 @@
|
||||
FROM rust:1.66
|
||||
FROM rust:1.72
|
||||
|
||||
ARG USERNAME=lldapdev
|
||||
ARG USER_UID=1000
|
||||
# We need to keep the user as 1001 to match the GitHub runner's UID.
|
||||
# See https://github.com/actions/checkout/issues/956.
|
||||
ARG USER_UID=1001
|
||||
ARG USER_GID=$USER_UID
|
||||
|
||||
# Create the user
|
||||
@@ -21,4 +23,4 @@ RUN RUSTFLAGS=-Ctarget-feature=-crt-static cargo install wasm-pack \
|
||||
|
||||
USER $USERNAME
|
||||
ENV CARGO_HOME=/home/$USERNAME/.cargo
|
||||
ENV SHELL=/bin/bash
|
||||
ENV SHELL=/bin/bash
|
||||
|
||||
2
.gitattributes
vendored
2
.gitattributes
vendored
@@ -1,4 +1,4 @@
|
||||
example-configs/** linguist-documentation
|
||||
example_configs/** linguist-documentation
|
||||
docs/** linguist-documentation
|
||||
*.md linguist-documentation
|
||||
lldap_config.docker_template.toml linguist-documentation
|
||||
|
||||
5
.github/FUNDING.yml
vendored
Normal file
5
.github/FUNDING.yml
vendored
Normal file
@@ -0,0 +1,5 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: [lldap]
|
||||
|
||||
custom: ['https://bmc.link/nitnelave']
|
||||
6
.github/workflows/Dockerfile.ci.alpine
vendored
6
.github/workflows/Dockerfile.ci.alpine
vendored
@@ -32,9 +32,9 @@ RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
|
||||
; fi
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/arm/v7" ]; then \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap-bin/lldap target/lldap && \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap-bin/lldap target/lldap && \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
|
||||
6
.github/workflows/Dockerfile.ci.debian
vendored
6
.github/workflows/Dockerfile.ci.debian
vendored
@@ -32,9 +32,9 @@ RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
|
||||
; fi
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/arm/v7" ]; then \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap-bin/lldap target/lldap && \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap-bin/lldap target/lldap && \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
|
||||
49
.github/workflows/Dockerfile.dev
vendored
49
.github/workflows/Dockerfile.dev
vendored
@@ -1,45 +1,40 @@
|
||||
# Keep tracking base image
|
||||
FROM rust:1.66-slim-bullseye
|
||||
FROM rust:1.71-slim-bookworm
|
||||
|
||||
# Set needed env path
|
||||
ENV PATH="/opt/aarch64-linux-musl-cross/:/opt/aarch64-linux-musl-cross/bin/:/opt/x86_64-linux-musl-cross/:/opt/x86_64-linux-musl-cross/bin/:$PATH"
|
||||
ENV PATH="/opt/armv7l-linux-musleabihf-cross/:/opt/armv7l-linux-musleabihf-cross/bin/:/opt/aarch64-linux-musl-cross/:/opt/aarch64-linux-musl-cross/bin/:/opt/x86_64-linux-musl-cross/:/opt/x86_64-linux-musl-cross/bin/:$PATH"
|
||||
|
||||
### Install build deps x86_64
|
||||
# Set building env
|
||||
ENV CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse \
|
||||
CARGO_NET_GIT_FETCH_WITH_CLI=true \
|
||||
CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER=armv7l-linux-musleabihf-gcc \
|
||||
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER=aarch64-linux-musl-gcc \
|
||||
CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER=x86_64-linux-musl-gcc \
|
||||
CC_armv7_unknown_linux_musleabihf=armv7l-linux-musleabihf-gcc \
|
||||
CC_x86_64_unknown_linux_musl=x86_64-linux-musl-gcc \
|
||||
CC_aarch64_unknown_linux_musl=aarch64-linux-musl-gcc
|
||||
|
||||
### Install Additional Build Tools
|
||||
RUN apt update && \
|
||||
apt install -y --no-install-recommends curl git wget build-essential make perl pkg-config curl tar jq musl-tools gzip && \
|
||||
curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \
|
||||
apt update && \
|
||||
apt install -y --no-install-recommends nodejs && \
|
||||
apt install -y --no-install-recommends curl git wget make perl pkg-config tar jq gzip && \
|
||||
apt clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
### Install build deps aarch64 build
|
||||
RUN dpkg --add-architecture arm64 && \
|
||||
apt update && \
|
||||
apt install -y gcc-aarch64-linux-gnu g++-aarch64-linux-gnu libc6-arm64-cross libc6-dev-arm64-cross gzip && \
|
||||
apt clean && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
rustup target add aarch64-unknown-linux-gnu
|
||||
|
||||
### armhf deps
|
||||
RUN dpkg --add-architecture armhf && \
|
||||
apt update && \
|
||||
apt install -y gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf libc6-armhf-cross libc6-dev-armhf-cross gzip && \
|
||||
apt clean && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
rustup target add armv7-unknown-linux-gnueabihf
|
||||
|
||||
### Add musl-gcc aarch64 and x86_64
|
||||
|
||||
### Add musl-gcc aarch64, x86_64 and armv7l
|
||||
RUN wget -c https://musl.cc/x86_64-linux-musl-cross.tgz && \
|
||||
tar zxf ./x86_64-linux-musl-cross.tgz -C /opt && \
|
||||
wget -c https://musl.cc/aarch64-linux-musl-cross.tgz && \
|
||||
tar zxf ./aarch64-linux-musl-cross.tgz -C /opt && \
|
||||
wget -c http://musl.cc/armv7l-linux-musleabihf-cross.tgz && \
|
||||
tar zxf ./armv7l-linux-musleabihf-cross.tgz -C /opt && \
|
||||
rm ./x86_64-linux-musl-cross.tgz && \
|
||||
rm ./aarch64-linux-musl-cross.tgz
|
||||
rm ./aarch64-linux-musl-cross.tgz && \
|
||||
rm ./armv7l-linux-musleabihf-cross.tgz
|
||||
|
||||
### Add musl target
|
||||
RUN rustup target add x86_64-unknown-linux-musl && \
|
||||
rustup target add aarch64-unknown-linux-musl
|
||||
rustup target add aarch64-unknown-linux-musl && \
|
||||
rustup target add armv7-unknown-linux-musleabihf
|
||||
|
||||
|
||||
CMD ["bash"]
|
||||
|
||||
82
.github/workflows/docker-build-static.yml
vendored
82
.github/workflows/docker-build-static.yml
vendored
@@ -30,7 +30,6 @@ env:
|
||||
|
||||
# build-ui , create/compile the web
|
||||
### install wasm
|
||||
### install rollup
|
||||
### run app/build.sh
|
||||
### upload artifacts
|
||||
|
||||
@@ -40,10 +39,10 @@ env:
|
||||
# GitHub actions randomly timeout when downloading musl-gcc, using custom dev image #
|
||||
# Look into .github/workflows/Dockerfile.dev for development image details #
|
||||
# Using lldap dev image based on https://hub.docker.com/_/rust and musl-gcc bundled #
|
||||
# lldap/rust-dev:latest #
|
||||
#######################################################################################
|
||||
### Cargo build
|
||||
### aarch64 and amd64 is musl based
|
||||
### armv7 is glibc based, musl had issue with time_t when cross compile https://github.com/rust-lang/libc/issues/1848
|
||||
# Cargo build
|
||||
### armv7, aarch64 and amd64 is musl based
|
||||
|
||||
# build-ui,builds-armhf, build-aarch64, build-amd64 will upload artifacts will be used next job
|
||||
|
||||
@@ -51,12 +50,11 @@ env:
|
||||
### will run lldap with postgres, mariadb and sqlite backend, do selfcheck command.
|
||||
|
||||
# Build docker image
|
||||
### Triplet docker image arch with debian base
|
||||
### amd64 & aarch64 with alpine base
|
||||
### Triplet docker image arch with debian and alpine base
|
||||
# build-docker-image job will fetch artifacts and run Dockerfile.ci then push the image.
|
||||
### Look into .github/workflows/Dockerfile.ci.debian or .github/workflowds/Dockerfile.ci.alpine
|
||||
|
||||
# create release artifacts
|
||||
# Create release artifacts
|
||||
### Fetch artifacts
|
||||
### Clean up web artifact
|
||||
### Setup folder structure
|
||||
@@ -86,10 +84,10 @@ jobs:
|
||||
needs: pre_job
|
||||
if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.event_name == 'release' }}
|
||||
container:
|
||||
image: nitnelave/rust-dev:latest
|
||||
image: lldap/rust-dev:latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3.5.2
|
||||
uses: actions/checkout@v4.0.0
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: |
|
||||
@@ -101,8 +99,6 @@ jobs:
|
||||
key: lldap-ui-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: |
|
||||
lldap-ui-
|
||||
- name: Install rollup (nodejs)
|
||||
run: npm install -g rollup
|
||||
- name: Add wasm target (rust)
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- name: Install wasm-pack with cargo
|
||||
@@ -125,20 +121,18 @@ jobs:
|
||||
needs: pre_job
|
||||
if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.event_name == 'release' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target: [armv7-unknown-linux-gnueabihf, aarch64-unknown-linux-musl, x86_64-unknown-linux-musl]
|
||||
target: [armv7-unknown-linux-musleabihf, aarch64-unknown-linux-musl, x86_64-unknown-linux-musl]
|
||||
container:
|
||||
image: nitnelave/rust-dev:latest
|
||||
image: lldap/rust-dev:latest
|
||||
env:
|
||||
CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
|
||||
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER: aarch64-linux-musl-gcc
|
||||
CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER: x86_64-linux-musl-gcc
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTFLAGS: -Ctarget-feature=+crt-static
|
||||
CARGO_HOME: ${GITHUB_WORKSPACE}/.cargo
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3.5.2
|
||||
uses: actions/checkout@v4.0.0
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: |
|
||||
@@ -186,7 +180,7 @@ jobs:
|
||||
MARIADB_ALLOW_EMPTY_ROOT_PASSWORD: 1
|
||||
options: >-
|
||||
--name mariadb
|
||||
--health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
|
||||
--health-cmd="mariadb-admin ping" --health-interval=5s --health-timeout=2s --health-retries=3
|
||||
|
||||
postgresql:
|
||||
image: postgres:latest
|
||||
@@ -281,7 +275,7 @@ jobs:
|
||||
MARIADB_ALLOW_EMPTY_ROOT_PASSWORD: 1
|
||||
options: >-
|
||||
--name mariadb
|
||||
--health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
|
||||
--health-cmd="mariadb-admin ping" --health-interval=5s --health-timeout=2s --health-retries=3
|
||||
|
||||
|
||||
mysql:
|
||||
@@ -299,6 +293,11 @@ jobs:
|
||||
|
||||
|
||||
steps:
|
||||
- name: Checkout scripts
|
||||
uses: actions/checkout@v4.0.0
|
||||
with:
|
||||
sparse-checkout: 'scripts'
|
||||
|
||||
- name: Download LLDAP artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
@@ -347,10 +346,8 @@ jobs:
|
||||
|
||||
- name: Export and Converting to Postgress
|
||||
run: |
|
||||
curl -L https://raw.githubusercontent.com/lldap/lldap/main/scripts/sqlite_dump_commands.sh -o helper.sh
|
||||
chmod +x ./helper.sh
|
||||
./helper.sh | sqlite3 ./users.db > ./dump.sql
|
||||
sed -i -r -e "s/X'([[:xdigit:]]+'[^'])/'\\\x\\1/g" -e '1s/^/BEGIN;\n/' -e '$aCOMMIT;' ./dump.sql
|
||||
bash ./scripts/sqlite_dump_commands.sh | sqlite3 ./users.db > ./dump.sql
|
||||
sed -i -r -e "s/X'([[:xdigit:]]+'[^'])/'\\\x\\1/g" -e ":a; s/(INSERT INTO user_attribute_schema\(.*\) VALUES\(.*),1([^']*\);)$/\1,true\2/; s/(INSERT INTO user_attribute_schema\(.*\) VALUES\(.*),0([^']*\);)$/\1,false\2/; ta" -e '1s/^/BEGIN;\n/' -e '$aCOMMIT;' ./dump.sql
|
||||
|
||||
- name: Create schema on postgres
|
||||
run: |
|
||||
@@ -358,16 +355,14 @@ jobs:
|
||||
|
||||
- name: Copy converted db to postgress and import
|
||||
run: |
|
||||
docker ps -a
|
||||
docker cp ./dump.sql postgresql:/tmp/dump.sql
|
||||
docker exec postgresql bash -c "psql -U lldapuser -d lldap < /tmp/dump.sql"
|
||||
docker exec postgresql bash -c "psql -U lldapuser -d lldap < /tmp/dump.sql" | tee import.log
|
||||
rm ./dump.sql
|
||||
! grep ERROR import.log > /dev/null
|
||||
|
||||
- name: Export and Converting to mariadb
|
||||
run: |
|
||||
curl -L https://raw.githubusercontent.com/lldap/lldap/main/scripts/sqlite_dump_commands.sh -o helper.sh
|
||||
chmod +x ./helper.sh
|
||||
./helper.sh | sqlite3 ./users.db > ./dump.sql
|
||||
bash ./scripts/sqlite_dump_commands.sh | sqlite3 ./users.db > ./dump.sql
|
||||
cp ./dump.sql ./dump-no-sed.sql
|
||||
sed -i -r -e "s/([^']'[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{9})\+00:00'([^'])/\1'\2/g" \-e 's/^INSERT INTO "?([a-zA-Z0-9_]+)"?/INSERT INTO `\1`/' -e '1s/^/START TRANSACTION;\n/' -e '$aCOMMIT;' ./dump.sql
|
||||
sed -i '1 i\SET FOREIGN_KEY_CHECKS = 0;' ./dump.sql
|
||||
@@ -377,16 +372,14 @@ jobs:
|
||||
|
||||
- name: Copy converted db to mariadb and import
|
||||
run: |
|
||||
docker ps -a
|
||||
docker cp ./dump.sql mariadb:/tmp/dump.sql
|
||||
docker exec mariadb bash -c "mariadb -ulldapuser -plldappass -f lldap < /tmp/dump.sql"
|
||||
docker exec mariadb bash -c "mariadb -ulldapuser -plldappass -f lldap < /tmp/dump.sql" | tee import.log
|
||||
rm ./dump.sql
|
||||
! grep ERROR import.log > /dev/null
|
||||
|
||||
- name: Export and Converting to mysql
|
||||
run: |
|
||||
curl -L https://raw.githubusercontent.com/lldap/lldap/main/scripts/sqlite_dump_commands.sh -o helper.sh
|
||||
chmod +x ./helper.sh
|
||||
./helper.sh | sqlite3 ./users.db > ./dump.sql
|
||||
bash ./scripts/sqlite_dump_commands.sh | sqlite3 ./users.db > ./dump.sql
|
||||
sed -i -r -e 's/^INSERT INTO "?([a-zA-Z0-9_]+)"?/INSERT INTO `\1`/' -e '1s/^/START TRANSACTION;\n/' -e '$aCOMMIT;' ./dump.sql
|
||||
sed -i '1 i\SET FOREIGN_KEY_CHECKS = 0;' ./dump.sql
|
||||
|
||||
@@ -395,10 +388,10 @@ jobs:
|
||||
|
||||
- name: Copy converted db to mysql and import
|
||||
run: |
|
||||
docker ps -a
|
||||
docker cp ./dump.sql mysql:/tmp/dump.sql
|
||||
docker exec mysql bash -c "mysql -ulldapuser -plldappass -f lldap < /tmp/dump.sql"
|
||||
docker exec mysql bash -c "mysql -ulldapuser -plldappass -f lldap < /tmp/dump.sql" | tee import.log
|
||||
rm ./dump.sql
|
||||
! grep ERROR import.log > /dev/null
|
||||
|
||||
- name: Run lldap with postgres DB and healthcheck again
|
||||
run: |
|
||||
@@ -434,11 +427,12 @@ jobs:
|
||||
LLDAP_http_port: 17173
|
||||
LLDAP_JWT_SECRET: somejwtsecret
|
||||
|
||||
- name: Test Dummy User
|
||||
run: |
|
||||
ldapsearch -H ldap://localhost:3891 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
ldapsearch -H ldap://localhost:3892 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
ldapsearch -H ldap://localhost:3893 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
- name: Test Dummy User Postgres
|
||||
run: ldapsearch -H ldap://localhost:3891 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
- name: Test Dummy User MariaDB
|
||||
run: ldapsearch -H ldap://localhost:3892 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
- name: Test Dummy User MySQL
|
||||
run: ldapsearch -H ldap://localhost:3893 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
|
||||
build-docker-image:
|
||||
needs: [build-ui, build-bin]
|
||||
@@ -477,7 +471,7 @@ jobs:
|
||||
packages: write
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v3.5.2
|
||||
uses: actions/checkout@v4.0.0
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
@@ -593,13 +587,13 @@ jobs:
|
||||
run: |
|
||||
mv bin/aarch64-unknown-linux-musl-lldap-bin/lldap bin/aarch64-lldap
|
||||
mv bin/x86_64-unknown-linux-musl-lldap-bin/lldap bin/amd64-lldap
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap-bin/lldap bin/armhf-lldap
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap-bin/lldap bin/armhf-lldap
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool bin/aarch64-lldap_migration_tool
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool bin/amd64-lldap_migration_tool
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_migration_tool-bin/lldap_migration_tool bin/armhf-lldap_migration_tool
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_migration_tool-bin/lldap_migration_tool bin/armhf-lldap_migration_tool
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password bin/aarch64-lldap_set_password
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password bin/amd64-lldap_set_password
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_set_password-bin/lldap_set_password bin/armhf-lldap_set_password
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_set_password-bin/lldap_set_password bin/armhf-lldap_set_password
|
||||
chmod +x bin/*-lldap
|
||||
chmod +x bin/*-lldap_migration_tool
|
||||
chmod +x bin/*-lldap_set_password
|
||||
|
||||
20
.github/workflows/release-bot.yml
vendored
Normal file
20
.github/workflows/release-bot.yml
vendored
Normal file
@@ -0,0 +1,20 @@
|
||||
name: Release Bot
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: nflaig/release-comment-on-pr@master
|
||||
with:
|
||||
token: ${{ secrets.RELEASE_BOT_TOKEN }}
|
||||
message: |
|
||||
Thank you everyone for the contribution!
|
||||
This feature is now available in the latest release, [${releaseTag}](${releaseUrl}).
|
||||
You can support LLDAP by starring our repo, contributing some configuration examples and becoming a sponsor.
|
||||
37
.github/workflows/rust.yml
vendored
37
.github/workflows/rust.yml
vendored
@@ -33,7 +33,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3.5.2
|
||||
uses: actions/checkout@v4.0.0
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: Build
|
||||
run: cargo build --verbose --workspace
|
||||
@@ -52,7 +52,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3.5.2
|
||||
uses: actions/checkout@v4.0.0
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
@@ -69,7 +69,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3.5.2
|
||||
uses: actions/checkout@v4.0.0
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
@@ -81,12 +81,14 @@ jobs:
|
||||
|
||||
coverage:
|
||||
name: Code coverage
|
||||
needs: pre_job
|
||||
needs:
|
||||
- pre_job
|
||||
- test
|
||||
if: ${{ needs.pre_job.outputs.should_skip != 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v3.5.2
|
||||
uses: actions/checkout@v4.0.0
|
||||
|
||||
- name: Install Rust
|
||||
run: rustup toolchain install nightly --component llvm-tools-preview && rustup component add llvm-tools-preview --toolchain stable-x86_64-unknown-linux-gnu
|
||||
@@ -112,28 +114,3 @@ jobs:
|
||||
files: lcov.info
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
publish-crates:
|
||||
name: Publish on crates.io
|
||||
if: ${{ needs.pre_job.outputs.should_skip != 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main') || github.event_name == 'release' }}
|
||||
needs: pre_job
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target: [lldap_auth, lldap, lldap_app, lldap_set_password, lldap_migration_tool]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3.5.2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
- name: Publish ${{ matrix.target }} crate
|
||||
uses: katyo/publish-crates@v2
|
||||
with:
|
||||
args: -p ${{ matrix.target }}
|
||||
dry-run: ${{ github.event_name != 'release' }}
|
||||
check-repo: ${{ github.event_name != 'pull_request' }}
|
||||
registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
|
||||
ignore-unpublished-changes: ${{ github.event_name != 'release' }}
|
||||
|
||||
|
||||
63
CHANGELOG.md
63
CHANGELOG.md
@@ -5,6 +5,69 @@ All notable changes to this project will be documented in this file.
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [0.5.0] 2023-09-14
|
||||
|
||||
### Breaking
|
||||
|
||||
- Emails and UUIDs are now enforced to be unique.
|
||||
- If you have several users with the same email, you'll have to disambiguate
|
||||
them. You can do that by either issuing SQL commands directly
|
||||
(`UPDATE users SET email = 'x@x' WHERE user_id = 'bob';`), or by reverting
|
||||
to a 0.4.x version of LLDAP and editing the user through the web UI.
|
||||
An error will prevent LLDAP 0.5+ from starting otherwise.
|
||||
- This was done to prevent account takeover for systems that allow to
|
||||
login via email.
|
||||
|
||||
### Added
|
||||
|
||||
- The server private key can be set as a seed from an env variable (#504).
|
||||
- This is especially useful when you have multiple containers, they don't
|
||||
need to share a writeable folder.
|
||||
- Added support for changing the password through a plain LDAP Modify
|
||||
operation (as opposed to an extended operation), to allow Jellyfin
|
||||
to change password (#620).
|
||||
- Allow creating a user with multiple objectClass (#612).
|
||||
- Emails now have a message ID (#608).
|
||||
- Added a warning for browsers that have WASM/JS disabled (#639).
|
||||
- Added support for querying OUs in LDAP (#669).
|
||||
- Added a button to clear the avatar in the UI (#358).
|
||||
|
||||
|
||||
### Changed
|
||||
|
||||
- Groups are now sorted by name in the web UI (#623).
|
||||
- ARM build now uses musl (#584).
|
||||
- Improved logging.
|
||||
- Default admin user is only created if there are no admins (#563).
|
||||
- That allows you to remove the default admin, making it harder to
|
||||
bruteforce.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed URL parsing with a trailing slash in the password setting utility
|
||||
(#597).
|
||||
|
||||
In addition to all that, there was significant progress towards #67,
|
||||
user-defined attributes. That complex feature will unblock integration with many
|
||||
systems, including PAM authentication.
|
||||
|
||||
### New services
|
||||
|
||||
- Ejabberd
|
||||
- Ergo
|
||||
- LibreNMS
|
||||
- Mealie
|
||||
- MinIO
|
||||
- OpnSense
|
||||
- PfSense
|
||||
- PowerDnsAdmin
|
||||
- Proxmox
|
||||
- Squid
|
||||
- Tandoor recipes
|
||||
- TheLounge
|
||||
- Zabbix-web
|
||||
- Zulip
|
||||
|
||||
## [0.4.3] 2023-04-11
|
||||
|
||||
The repository has changed from `nitnelave/lldap` to `lldap/lldap`, both on GitHub
|
||||
|
||||
964
Cargo.lock
generated
964
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -9,6 +9,8 @@ members = [
|
||||
|
||||
default-members = ["server"]
|
||||
|
||||
resolver = "2"
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
|
||||
|
||||
20
README.md
20
README.md
@@ -36,15 +36,20 @@
|
||||
- [About](#about)
|
||||
- [Installation](#installation)
|
||||
- [With Docker](#with-docker)
|
||||
- [With Kubernetes](#with-kubernetes)
|
||||
- [From source](#from-source)
|
||||
- [Backend](#backend)
|
||||
- [Frontend](#frontend)
|
||||
- [Cross-compilation](#cross-compilation)
|
||||
- [Client configuration](#client-configuration)
|
||||
- [Compatible services](#compatible-services)
|
||||
- [General configuration guide](#general-configuration-guide)
|
||||
- [Sample client configurations](#sample-client-configurations)
|
||||
- [Migrating from SQLite](#migrating-from-sqlite)
|
||||
- [Comparisons with other services](#comparisons-with-other-services)
|
||||
- [vs OpenLDAP](#vs-openldap)
|
||||
- [vs FreeIPA](#vs-freeipa)
|
||||
- [vs Kanidm](#vs-kanidm)
|
||||
- [I can't log in!](#i-cant-log-in)
|
||||
- [Contributions](#contributions)
|
||||
|
||||
@@ -126,6 +131,8 @@ services:
|
||||
ports:
|
||||
# For LDAP
|
||||
- "3890:3890"
|
||||
# For LDAPS (LDAP Over SSL), enable port if LLDAP_LDAPS_OPTIONS__ENABLED set true, look env below
|
||||
#- "6360:6360"
|
||||
# For the web front-end
|
||||
- "17170:17170"
|
||||
volumes:
|
||||
@@ -139,6 +146,10 @@ services:
|
||||
- LLDAP_JWT_SECRET=REPLACE_WITH_RANDOM
|
||||
- LLDAP_LDAP_USER_PASS=REPLACE_WITH_PASSWORD
|
||||
- LLDAP_LDAP_BASE_DN=dc=example,dc=com
|
||||
# If using LDAPS, set enabled true and configure cert and key path
|
||||
# - LLDAP_LDAPS_OPTIONS__ENABLED=true
|
||||
# - LLDAP_LDAPS_OPTIONS__CERT_FILE=/path/to/certfile.crt
|
||||
# - LLDAP_LDAPS_OPTIONS__KEY_FILE=/path/to/keyfile.key
|
||||
# You can also set a different database:
|
||||
# - LLDAP_DATABASE_URL=mysql://mysql-user:password@mysql-server/my-database
|
||||
# - LLDAP_DATABASE_URL=postgres://postgres-user:password@postgres-server/my-database
|
||||
@@ -264,27 +275,36 @@ folder for help with:
|
||||
- [Dolibarr](example_configs/dolibarr.md)
|
||||
- [Ejabberd](example_configs/ejabberd.md)
|
||||
- [Emby](example_configs/emby.md)
|
||||
- [Ergo IRCd](example_configs/ergo.md)
|
||||
- [Gitea](example_configs/gitea.md)
|
||||
- [Grafana](example_configs/grafana_ldap_config.toml)
|
||||
- [Hedgedoc](example_configs/hedgedoc.md)
|
||||
- [Jellyfin](example_configs/jellyfin.md)
|
||||
- [Jitsi Meet](example_configs/jitsi_meet.conf)
|
||||
- [KeyCloak](example_configs/keycloak.md)
|
||||
- [LibreNMS](example_configs/librenms.md)
|
||||
- [Matrix](example_configs/matrix_synapse.yml)
|
||||
- [Mealie](example_configs/mealie.md)
|
||||
- [MinIO](example_configs/minio.md)
|
||||
- [Nextcloud](example_configs/nextcloud.md)
|
||||
- [Nexus](example_configs/nexus.md)
|
||||
- [Organizr](example_configs/Organizr.md)
|
||||
- [Portainer](example_configs/portainer.md)
|
||||
- [PowerDNS Admin](example_configs/powerdns_admin.md)
|
||||
- [Proxmox VE](example_configs/proxmox.md)
|
||||
- [Rancher](example_configs/rancher.md)
|
||||
- [Seafile](example_configs/seafile.md)
|
||||
- [Shaarli](example_configs/shaarli.md)
|
||||
- [Squid](example_configs/squid.md)
|
||||
- [Syncthing](example_configs/syncthing.md)
|
||||
- [TheLounge](example_configs/thelounge.md)
|
||||
- [Vaultwarden](example_configs/vaultwarden.md)
|
||||
- [WeKan](example_configs/wekan.md)
|
||||
- [WG Portal](example_configs/wg_portal.env.example)
|
||||
- [WikiJS](example_configs/wikijs.md)
|
||||
- [XBackBone](example_configs/xbackbone_config.php)
|
||||
- [Zendto](example_configs/zendto.md)
|
||||
- [Zulip](example_configs/zulip.md)
|
||||
|
||||
## Migrating from SQLite
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ homepage = "https://github.com/lldap/lldap"
|
||||
license = "GPL-3.0-only"
|
||||
name = "lldap_app"
|
||||
repository = "https://github.com/lldap/lldap"
|
||||
version = "0.5.0-alpha"
|
||||
version = "0.5.0"
|
||||
include = ["src/**/*", "queries/**/*", "Cargo.toml", "../schema.graphql"]
|
||||
|
||||
[dependencies]
|
||||
@@ -14,7 +14,7 @@ anyhow = "1"
|
||||
base64 = "0.13"
|
||||
gloo-console = "0.2.3"
|
||||
gloo-file = "0.2.3"
|
||||
gloo-net = "0.2"
|
||||
gloo-net = "*"
|
||||
graphql_client = "0.10"
|
||||
http = "0.2"
|
||||
jwt = "0.13"
|
||||
@@ -23,9 +23,9 @@ serde = "1"
|
||||
serde_json = "1"
|
||||
url-escape = "0.1.1"
|
||||
validator = "=0.14"
|
||||
validator_derive = "0.16"
|
||||
validator_derive = "*"
|
||||
wasm-bindgen = "0.2"
|
||||
wasm-bindgen-futures = "0.4"
|
||||
wasm-bindgen-futures = "*"
|
||||
yew = "0.19.3"
|
||||
yew-router = "0.16"
|
||||
|
||||
@@ -47,13 +47,13 @@ features = [
|
||||
]
|
||||
|
||||
[dependencies.chrono]
|
||||
version = "0.4"
|
||||
version = "*"
|
||||
features = [
|
||||
"wasmbind"
|
||||
]
|
||||
|
||||
[dependencies.lldap_auth]
|
||||
version = "0.3"
|
||||
path = "../auth"
|
||||
features = [ "opaque_client" ]
|
||||
|
||||
[dependencies.image]
|
||||
|
||||
@@ -15,8 +15,8 @@
|
||||
src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.1/dist/js/bootstrap.bundle.min.js"
|
||||
integrity="sha384-/bQdsTh/da6pkI1MST/rWKFNjaCP5gBSY4sEBT38Q/9RBh9AH40zEOg7Hlq2THRZ"
|
||||
crossorigin="anonymous"></script>
|
||||
<script
|
||||
src="https://cdn.jsdelivr.net/npm/bootstrap-dark-5@1.1.3/dist/js/darkmode.min.js"
|
||||
<script
|
||||
src="https://cdn.jsdelivr.net/npm/bootstrap-dark-5@1.1.3/dist/js/darkmode.min.js"
|
||||
integrity="sha384-A4SLs39X/aUfwRclRaXvNeXNBTLZdnZdHhhteqbYFS2jZTRD79tKeFeBn7SGXNpi"
|
||||
crossorigin="anonymous"></script>
|
||||
<link
|
||||
@@ -43,6 +43,23 @@
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<noscript>
|
||||
<!-- This will be displayed if the user doesn't have JavaScript enabled. -->
|
||||
LLDAP requires JavaScript, please switch to a compatible browser or
|
||||
enable it.
|
||||
</noscript>
|
||||
|
||||
<script>
|
||||
/* Detect if the user has WASM support. */
|
||||
if (typeof WebAssembly === 'undefined') {
|
||||
const pWASMMsg = document.createElement("p")
|
||||
pWASMMsg.innerHTML = `
|
||||
LLDAP requires WASM and JIT for JavaScript, please switch to a
|
||||
compatible browser or enable it.
|
||||
`
|
||||
document.body.appendChild(pWASMMsg)
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
@@ -13,8 +13,8 @@
|
||||
<script
|
||||
src="/static/bootstrap.bundle.min.js"
|
||||
integrity="sha384-/bQdsTh/da6pkI1MST/rWKFNjaCP5gBSY4sEBT38Q/9RBh9AH40zEOg7Hlq2THRZ"></script>
|
||||
<script
|
||||
src="/static/darkmode.min.js"
|
||||
<script
|
||||
src="/static/darkmode.min.js"
|
||||
integrity="sha384-A4SLs39X/aUfwRclRaXvNeXNBTLZdnZdHhhteqbYFS2jZTRD79tKeFeBn7SGXNpi"></script>
|
||||
<link
|
||||
rel="stylesheet"
|
||||
@@ -40,6 +40,23 @@
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<noscript>
|
||||
<!-- This will be displayed if the user doesn't have JavaScript enabled. -->
|
||||
LLDAP requires JavaScript, please switch to a compatible browser or
|
||||
enable it.
|
||||
</noscript>
|
||||
|
||||
<script>
|
||||
/* Detect if the user has WASM support. */
|
||||
if (typeof WebAssembly === 'undefined') {
|
||||
const pWASMMsg = document.createElement("p")
|
||||
pWASMMsg.innerHTML = `
|
||||
LLDAP requires WASM and JIT for JavaScript, please switch to a
|
||||
compatible browser or enable it.
|
||||
`
|
||||
document.body.appendChild(pWASMMsg)
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
@@ -128,9 +128,11 @@ impl CommonComponent<ChangePasswordForm> for ChangePasswordForm {
|
||||
Msg::SubmitNewPassword => {
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let new_password = self.form.model().password;
|
||||
let registration_start_request =
|
||||
opaque::client::registration::start_registration(&new_password, &mut rng)
|
||||
.context("Could not initiate password change")?;
|
||||
let registration_start_request = opaque::client::registration::start_registration(
|
||||
new_password.as_bytes(),
|
||||
&mut rng,
|
||||
)
|
||||
.context("Could not initiate password change")?;
|
||||
let req = registration::ClientRegistrationStartRequest {
|
||||
username: ctx.props().username.clone(),
|
||||
registration_start_request: registration_start_request.message,
|
||||
|
||||
@@ -117,7 +117,10 @@ impl CommonComponent<CreateUserForm> for CreateUserForm {
|
||||
let opaque::client::registration::ClientRegistrationStartResult {
|
||||
state,
|
||||
message,
|
||||
} = opaque::client::registration::start_registration(&password, &mut rng)?;
|
||||
} = opaque::client::registration::start_registration(
|
||||
password.as_bytes(),
|
||||
&mut rng,
|
||||
)?;
|
||||
let req = registration::ClientRegistrationStartRequest {
|
||||
username: user_id,
|
||||
registration_start_request: message,
|
||||
@@ -234,7 +237,7 @@ impl Component for CreateUserForm {
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row mb-3">
|
||||
<label for="display-name"
|
||||
<label for="display_name"
|
||||
class="form-label col-4 col-form-label">
|
||||
{"Display name:"}
|
||||
</label>
|
||||
@@ -253,7 +256,7 @@ impl Component for CreateUserForm {
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row mb-3">
|
||||
<label for="first-name"
|
||||
<label for="first_name"
|
||||
class="form-label col-4 col-form-label">
|
||||
{"First name:"}
|
||||
</label>
|
||||
@@ -272,7 +275,7 @@ impl Component for CreateUserForm {
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row mb-3">
|
||||
<label for="last-name"
|
||||
<label for="last_name"
|
||||
class="form-label col-4 col-form-label">
|
||||
{"Last name:"}
|
||||
</label>
|
||||
|
||||
@@ -65,7 +65,7 @@ impl CommonComponent<ResetPasswordStep2Form> for ResetPasswordStep2Form {
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let new_password = self.form.model().password;
|
||||
let registration_start_request =
|
||||
opaque_registration::start_registration(&new_password, &mut rng)
|
||||
opaque_registration::start_registration(new_password.as_bytes(), &mut rng)
|
||||
.context("Could not initiate password change")?;
|
||||
let req = registration::ClientRegistrationStartRequest {
|
||||
username: self.username.clone().unwrap(),
|
||||
|
||||
@@ -67,7 +67,8 @@ pub struct UpdateUser;
|
||||
pub struct UserDetailsForm {
|
||||
common: CommonComponentParts<Self>,
|
||||
form: yew_form::Form<UserModel>,
|
||||
avatar: JsFile,
|
||||
// None means that the avatar hasn't changed.
|
||||
avatar: Option<JsFile>,
|
||||
reader: Option<FileReader>,
|
||||
/// True if we just successfully updated the user, to display a success message.
|
||||
just_updated: bool,
|
||||
@@ -81,6 +82,8 @@ pub enum Msg {
|
||||
FileSelected(File),
|
||||
/// The "Submit" button was clicked.
|
||||
SubmitClicked,
|
||||
/// The "Clear" button for the avatar was clicked.
|
||||
ClearAvatarClicked,
|
||||
/// A picked file finished loading.
|
||||
FileLoaded(String, Result<Vec<u8>>),
|
||||
/// We got the response from the server about our update message.
|
||||
@@ -102,7 +105,12 @@ impl CommonComponent<UserDetailsForm> for UserDetailsForm {
|
||||
match msg {
|
||||
Msg::Update => Ok(true),
|
||||
Msg::FileSelected(new_avatar) => {
|
||||
if self.avatar.file.as_ref().map(|f| f.name()) != Some(new_avatar.name()) {
|
||||
if self
|
||||
.avatar
|
||||
.as_ref()
|
||||
.and_then(|f| f.file.as_ref().map(|f| f.name()))
|
||||
!= Some(new_avatar.name())
|
||||
{
|
||||
let file_name = new_avatar.name();
|
||||
let link = ctx.link().clone();
|
||||
self.reader = Some(read_as_bytes(&new_avatar, move |res| {
|
||||
@@ -111,26 +119,32 @@ impl CommonComponent<UserDetailsForm> for UserDetailsForm {
|
||||
res.map_err(|e| anyhow::anyhow!("{:#}", e)),
|
||||
))
|
||||
}));
|
||||
self.avatar = JsFile {
|
||||
self.avatar = Some(JsFile {
|
||||
file: Some(new_avatar),
|
||||
contents: None,
|
||||
};
|
||||
});
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
Msg::SubmitClicked => self.submit_user_update_form(ctx),
|
||||
Msg::ClearAvatarClicked => {
|
||||
self.avatar = Some(JsFile::default());
|
||||
Ok(true)
|
||||
}
|
||||
Msg::UserUpdated(response) => self.user_update_finished(response),
|
||||
Msg::FileLoaded(file_name, data) => {
|
||||
if let Some(file) = &self.avatar.file {
|
||||
if file.name() == file_name {
|
||||
let data = data?;
|
||||
if !is_valid_jpeg(data.as_slice()) {
|
||||
// Clear the selection.
|
||||
self.avatar = JsFile::default();
|
||||
bail!("Chosen image is not a valid JPEG");
|
||||
} else {
|
||||
self.avatar.contents = Some(data);
|
||||
return Ok(true);
|
||||
if let Some(avatar) = &mut self.avatar {
|
||||
if let Some(file) = &avatar.file {
|
||||
if file.name() == file_name {
|
||||
let data = data?;
|
||||
if !is_valid_jpeg(data.as_slice()) {
|
||||
// Clear the selection.
|
||||
self.avatar = None;
|
||||
bail!("Chosen image is not a valid JPEG");
|
||||
} else {
|
||||
avatar.contents = Some(data);
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -159,7 +173,7 @@ impl Component for UserDetailsForm {
|
||||
Self {
|
||||
common: CommonComponentParts::<Self>::create(),
|
||||
form: yew_form::Form::new(model),
|
||||
avatar: JsFile::default(),
|
||||
avatar: None,
|
||||
just_updated: false,
|
||||
reader: None,
|
||||
user: ctx.props().user.clone(),
|
||||
@@ -175,11 +189,13 @@ impl Component for UserDetailsForm {
|
||||
type Field = yew_form::Field<UserModel>;
|
||||
let link = &ctx.link();
|
||||
|
||||
let avatar_base64 = maybe_to_base64(&self.avatar).unwrap_or_default();
|
||||
let avatar_string = avatar_base64
|
||||
.as_deref()
|
||||
.or(self.user.avatar.as_deref())
|
||||
.unwrap_or("");
|
||||
let avatar_string = match &self.avatar {
|
||||
Some(avatar) => {
|
||||
let avatar_base64 = to_base64(avatar);
|
||||
avatar_base64.as_deref().unwrap_or("").to_owned()
|
||||
}
|
||||
None => self.user.avatar.as_deref().unwrap_or("").to_owned(),
|
||||
};
|
||||
html! {
|
||||
<div class="py-3">
|
||||
<form class="form">
|
||||
@@ -291,7 +307,7 @@ impl Component for UserDetailsForm {
|
||||
</label>
|
||||
<div class="col-8">
|
||||
<div class="row align-items-center">
|
||||
<div class="col-8">
|
||||
<div class="col-5">
|
||||
<input
|
||||
class="form-control"
|
||||
id="avatarInput"
|
||||
@@ -302,12 +318,27 @@ impl Component for UserDetailsForm {
|
||||
Self::upload_files(input.files())
|
||||
})} />
|
||||
</div>
|
||||
<div class="col-3">
|
||||
<button
|
||||
class="btn btn-secondary col-auto"
|
||||
id="avatarClear"
|
||||
disabled={self.common.is_task_running()}
|
||||
onclick={link.callback(|e: MouseEvent| {e.prevent_default(); Msg::ClearAvatarClicked})}>
|
||||
{"Clear"}
|
||||
</button>
|
||||
</div>
|
||||
<div class="col-4">
|
||||
<img
|
||||
id="avatarDisplay"
|
||||
src={format!("data:image/jpeg;base64, {}", avatar_string)}
|
||||
style="max-height:128px;max-width:128px;height:auto;width:auto;"
|
||||
alt="Avatar" />
|
||||
{
|
||||
if !avatar_string.is_empty() {
|
||||
html!{
|
||||
<img
|
||||
id="avatarDisplay"
|
||||
src={format!("data:image/jpeg;base64, {}", avatar_string)}
|
||||
style="max-height:128px;max-width:128px;height:auto;width:auto;"
|
||||
alt="Avatar" />
|
||||
}
|
||||
} else { html! {} }
|
||||
}
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -345,10 +376,10 @@ impl UserDetailsForm {
|
||||
if !self.form.validate() {
|
||||
bail!("Invalid inputs");
|
||||
}
|
||||
if let JsFile {
|
||||
if let Some(JsFile {
|
||||
file: Some(_),
|
||||
contents: None,
|
||||
} = &self.avatar
|
||||
}) = &self.avatar
|
||||
{
|
||||
bail!("Image file hasn't finished loading, try again");
|
||||
}
|
||||
@@ -376,7 +407,9 @@ impl UserDetailsForm {
|
||||
if base_user.last_name != model.last_name {
|
||||
user_input.lastName = Some(model.last_name);
|
||||
}
|
||||
user_input.avatar = maybe_to_base64(&self.avatar)?;
|
||||
if let Some(avatar) = &self.avatar {
|
||||
user_input.avatar = Some(to_base64(avatar)?);
|
||||
}
|
||||
// Nothing changed.
|
||||
if user_input == default_user_input {
|
||||
return Ok(false);
|
||||
@@ -398,8 +431,8 @@ impl UserDetailsForm {
|
||||
self.user.display_name = model.display_name;
|
||||
self.user.first_name = model.first_name;
|
||||
self.user.last_name = model.last_name;
|
||||
if let Some(avatar) = maybe_to_base64(&self.avatar)? {
|
||||
self.user.avatar = Some(avatar);
|
||||
if let Some(avatar) = &self.avatar {
|
||||
self.user.avatar = Some(to_base64(avatar)?);
|
||||
}
|
||||
self.just_updated = true;
|
||||
Ok(true)
|
||||
@@ -424,12 +457,12 @@ fn is_valid_jpeg(bytes: &[u8]) -> bool {
|
||||
.is_ok()
|
||||
}
|
||||
|
||||
fn maybe_to_base64(file: &JsFile) -> Result<Option<String>> {
|
||||
fn to_base64(file: &JsFile) -> Result<String> {
|
||||
match file {
|
||||
JsFile {
|
||||
file: None,
|
||||
contents: _,
|
||||
} => Ok(None),
|
||||
} => Ok(String::new()),
|
||||
JsFile {
|
||||
file: Some(_),
|
||||
contents: None,
|
||||
@@ -441,7 +474,7 @@ fn maybe_to_base64(file: &JsFile) -> Result<Option<String>> {
|
||||
if !is_valid_jpeg(data.as_slice()) {
|
||||
bail!("Chosen image is not a valid JPEG");
|
||||
}
|
||||
Ok(Some(base64::encode(data)))
|
||||
Ok(base64::encode(data))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,7 +6,7 @@ homepage = "https://github.com/lldap/lldap"
|
||||
license = "GPL-3.0-only"
|
||||
name = "lldap_auth"
|
||||
repository = "https://github.com/lldap/lldap"
|
||||
version = "0.3.0"
|
||||
version = "0.4.0"
|
||||
|
||||
[features]
|
||||
default = ["opaque_server", "opaque_client"]
|
||||
@@ -18,23 +18,23 @@ js = []
|
||||
rust-argon2 = "0.8"
|
||||
curve25519-dalek = "3"
|
||||
digest = "0.9"
|
||||
generic-array = "0.14"
|
||||
generic-array = "*"
|
||||
rand = "0.8"
|
||||
serde = "1"
|
||||
serde = "*"
|
||||
sha2 = "0.9"
|
||||
thiserror = "1"
|
||||
thiserror = "*"
|
||||
|
||||
[dependencies.opaque-ke]
|
||||
version = "0.6"
|
||||
|
||||
[dependencies.chrono]
|
||||
version = "0.4"
|
||||
version = "*"
|
||||
features = [ "serde" ]
|
||||
|
||||
# For WASM targets, use the JS getrandom.
|
||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies.getrandom]
|
||||
version = "0.2"
|
||||
features = ["js"]
|
||||
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom]
|
||||
version = "0.2"
|
||||
features = ["js"]
|
||||
|
||||
@@ -77,10 +77,10 @@ pub mod client {
|
||||
pub use opaque_ke::ClientRegistrationFinishParameters;
|
||||
/// Initiate the registration negotiation.
|
||||
pub fn start_registration<R: RngCore + CryptoRng>(
|
||||
password: &str,
|
||||
password: &[u8],
|
||||
rng: &mut R,
|
||||
) -> AuthenticationResult<ClientRegistrationStartResult> {
|
||||
Ok(ClientRegistration::start(rng, password.as_bytes())?)
|
||||
Ok(ClientRegistration::start(rng, password)?)
|
||||
}
|
||||
|
||||
/// Finalize the registration negotiation.
|
||||
|
||||
@@ -20,7 +20,7 @@ LLDAP has a command that will connect to a target database and initialize the
|
||||
schema. If running with docker, run the following command to use your active
|
||||
instance (this has the benefit of ensuring your container has access):
|
||||
|
||||
```
|
||||
```sh
|
||||
docker exec -it <LLDAP container name> /app/lldap create_schema -d <Target database url>
|
||||
```
|
||||
|
||||
@@ -34,7 +34,7 @@ databases (SQLite in this example) will give an error if LLDAP is in the middle
|
||||
statements. There are various ways to do this, but a simple enough way is filtering a
|
||||
whole database dump. This repo contains [a script](/scripts/sqlite_dump_commands.sh) to generate SQLite commands for creating an appropriate dump:
|
||||
|
||||
```
|
||||
```sh
|
||||
./sqlite_dump_commands.sh | sqlite3 /path/to/lldap/config/users.db > /path/to/dump.sql
|
||||
```
|
||||
|
||||
@@ -49,8 +49,9 @@ a transaction in case one of the statements fail.
|
||||
PostgreSQL uses a different hex string format. The command below should switch SQLite
|
||||
format to PostgreSQL format, and wrap it all in a transaction:
|
||||
|
||||
```
|
||||
```sh
|
||||
sed -i -r -e "s/X'([[:xdigit:]]+'[^'])/'\\\x\\1/g" \
|
||||
-e ":a; s/(INSERT INTO user_attribute_schema\(.*\) VALUES\(.*),1([^']*\);)$/\1,true\2/; s/(INSERT INTO user_attribute_schema\(.*\) VALUES\(.*),0([^']*\);)$/\1,false\2/; ta" \
|
||||
-e '1s/^/BEGIN;\n/' \
|
||||
-e '$aCOMMIT;' /path/to/dump.sql
|
||||
```
|
||||
@@ -58,11 +59,11 @@ sed -i -r -e "s/X'([[:xdigit:]]+'[^'])/'\\\x\\1/g" \
|
||||
### To MySQL
|
||||
|
||||
MySQL mostly cooperates, but it gets some errors if you don't escape the `groups` table. It also uses
|
||||
backticks to escape table name instead of quotes. Run the
|
||||
backticks to escape table name instead of quotes. Run the
|
||||
following command to wrap all table names in backticks for good measure, and wrap the inserts in
|
||||
a transaction:
|
||||
|
||||
```
|
||||
```sh
|
||||
sed -i -r -e 's/^INSERT INTO "?([a-zA-Z0-9_]+)"?/INSERT INTO `\1`/' \
|
||||
-e '1s/^/START TRANSACTION;\n/' \
|
||||
-e '$aCOMMIT;' \
|
||||
@@ -74,7 +75,7 @@ sed -i -r -e 's/^INSERT INTO "?([a-zA-Z0-9_]+)"?/INSERT INTO `\1`/' \
|
||||
While MariaDB is supposed to be identical to MySQL, it doesn't support timezone offsets on DATETIME
|
||||
strings. Use the following command to remove those and perform the additional MySQL sanitization:
|
||||
|
||||
```
|
||||
```sh
|
||||
sed -i -r -e "s/([^']'[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{9})\+00:00'([^'])/\1'\2/g" \
|
||||
-e 's/^INSERT INTO "?([a-zA-Z0-9_]+)"?/INSERT INTO `\1`/' \
|
||||
-e '1s/^/START TRANSACTION;\n/' \
|
||||
|
||||
@@ -10,7 +10,7 @@ $conf['plugin']['authldap']['server'] = 'ldap://lldap_server:3890'; #IP of
|
||||
$conf['plugin']['authldap']['usertree'] = 'ou=people,dc=example,dc=com';
|
||||
$conf['plugin']['authldap']['grouptree'] = 'ou=groups, dc=example, dc=com';
|
||||
$conf['plugin']['authldap']['userfilter'] = '(&(uid=%{user})(objectClass=person))';
|
||||
$conf['plugin']['authldap']['groupfilter'] = '(objectClass=group)';
|
||||
$conf['plugin']['authldap']['groupfilter'] = '(&(member=%{dn})(objectClass=groupOfUniqueNames))';
|
||||
$conf['plugin']['authldap']['attributes'] = array('cn', 'displayname', 'mail', 'givenname', 'objectclass', 'sn', 'uid', 'memberof');
|
||||
$conf['plugin']['authldap']['version'] = 3;
|
||||
$conf['plugin']['authldap']['binddn'] = 'cn=admin,ou=people,dc=example,dc=com';
|
||||
|
||||
22
example_configs/ergo.md
Normal file
22
example_configs/ergo.md
Normal file
@@ -0,0 +1,22 @@
|
||||
# Basic LDAP auth for an Ergo IRC server
|
||||
|
||||
[Main documentation here.](https://github.com/ergochat/ergo-ldap)
|
||||
|
||||
For simple user auth prepare a ldap-config.yaml with the following settings
|
||||
|
||||
```
|
||||
host: "127.0.0.1"
|
||||
port: 3890
|
||||
timeout: 30s
|
||||
|
||||
# uncomment for TLS / LDAPS:
|
||||
# use-ssl: true
|
||||
|
||||
bind-dn: "uid=%s,ou=people,dc=example,dc=org"
|
||||
```
|
||||
|
||||
Then add the compiled ergo-ldap program to your Ergo folder and make sure it can be executed by the same user your Ergo IRCd runs as.
|
||||
|
||||
Follow the instructions in the main Ergo config file's accounts section on how to execute an external auth program.
|
||||
|
||||
Make sure SASL auth is enabled and then restart Ergo to enable LDAP linked SASL auth.
|
||||
@@ -37,7 +37,7 @@ search_base_dns = ["dc=example,dc=org"]
|
||||
[servers.attributes]
|
||||
member_of = "memberOf"
|
||||
email = "mail"
|
||||
name = "givenName"
|
||||
name = "displayName"
|
||||
surname = "sn"
|
||||
username = "uid"
|
||||
|
||||
|
||||
@@ -6,8 +6,8 @@ Home Assistant configures ldap auth via the [Command Line Auth Provider](https:/
|
||||
|
||||
The [auth script](lldap-ha-auth.sh) attempts to authenticate a user against an LLDAP server, using credentials provided via `username` and `password` environment variables. The first argument must be the URL of your LLDAP server, accessible from Home Assistant. You can provide an additional optional argument to confine allowed logins to a single group. The script will output the user's display name as the `name` variable, if not empty.
|
||||
|
||||
1. Copy the [auth script](lldap-ha-auth.sh) to your home assistant instance. In this example, we use `/config/lldap-auth.sh`.
|
||||
- Set the script as executable by running `chmod +x /config/lldap-auth-sh`
|
||||
1. Copy the [auth script](lldap-ha-auth.sh) to your home assistant instance. In this example, we use `/config/lldap-ha-auth.sh`.
|
||||
- Set the script as executable by running `chmod +x /config/lldap-ha-auth.sh`
|
||||
2. Add the following to your configuration.yaml in Home assistant:
|
||||
```yaml
|
||||
homeassistant:
|
||||
@@ -15,7 +15,7 @@ homeassistant:
|
||||
# Ensure you have the homeassistant provider enabled if you want to continue using your existing accounts
|
||||
- type: homeassistant
|
||||
- type: command_line
|
||||
command: /config/lldap-auth.sh
|
||||
command: /config/lldap-ha-auth.sh
|
||||
# Only allow users in the 'homeassistant_user' group to login.
|
||||
# Change to ["https://lldap.example.com"] to allow all users
|
||||
args: ["https://lldap.example.com", "homeassistant_user"]
|
||||
|
||||
@@ -37,9 +37,9 @@ Otherwise, just use:
|
||||
```
|
||||
### Admin Base DN
|
||||
|
||||
The DN of your admin group. If you have `media_admin` as your group you would use:
|
||||
The DN to search for your admins.
|
||||
```
|
||||
cn=media_admin,ou=groups,dc=example,dc=com
|
||||
ou=people,dc=example,dc=com
|
||||
```
|
||||
|
||||
### Admin Filter
|
||||
@@ -49,8 +49,15 @@ that), use:
|
||||
```
|
||||
(memberof=cn=media_admin,ou=groups,dc=example,dc=com)
|
||||
```
|
||||
Bear in mind that admins must also be a member of the users group if you use one.
|
||||
|
||||
Otherwise, you can use LLDAP's admin group:
|
||||
```
|
||||
(memberof=cn=lldap_admin,ou=groups,dc=example,dc=com)
|
||||
```
|
||||
|
||||
## Password change
|
||||
To allow changing Passwords via Jellyfin the following things are required
|
||||
- The bind user needs to have the group lldap_password_manager (changing passwords of members of the group lldap_admin does not work to prevent privilege escalation)
|
||||
- Check `Allow Password Change`
|
||||
- `LDAP Password Attribute` Needs to be set to `userPassword`
|
||||
|
||||
@@ -62,3 +62,11 @@ Once the groups are synchronized, go to "Manage > Groups" on the left. Click on
|
||||
|
||||
Assign the role "admin" to the group. Now you can log in as the LLDAP admin to
|
||||
the KeyCloak admin console.
|
||||
|
||||
## Fixing duplicate names or missing First Names for users
|
||||
|
||||
Since Keycloak and LLDAP use different attributes for different parts of a user's name, you may see duplicated or missing names for users in Keycloak. To fix this, update the attribute mappings:
|
||||
|
||||
Go back to "User Federation", edit your LDAP integration and click on the "Mappers" tab.
|
||||
|
||||
Find or create the "first name" mapper (it should have type `user-attribute-ldap-mapper`) and ensure the "LDAP Attribute" setting is set to `givenname`. Keycloak may have defaulted to `cn` which LLDAP uses for the "Display Name" of a user.
|
||||
|
||||
193
example_configs/librenms.md
Normal file
193
example_configs/librenms.md
Normal file
@@ -0,0 +1,193 @@
|
||||
# Configuration for LibreNMS
|
||||
|
||||
You can either configure LibreNMS from the webui or from the command line. This is a list of the variables that you should set.
|
||||
|
||||
## Essential
|
||||
|
||||
## auth_ldap_uid_attribute
|
||||
|
||||
```
|
||||
uid
|
||||
```
|
||||
|
||||
This sets 'uid' as the unique ldap attribute for users.
|
||||
|
||||
## auth_ldap_groupmemberattr
|
||||
|
||||
```
|
||||
member
|
||||
```
|
||||
|
||||
## auth_ldap_groups
|
||||
|
||||
```'
|
||||
{"nms_admin": {"level": 10}}'
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```
|
||||
auth_ldap_groups.nms_admin.level: 10
|
||||
```
|
||||
|
||||
These are both the same.
|
||||
|
||||
This example sets the group nms_admin as Admin (level 10).
|
||||
Set others to match more groups at different levels.
|
||||
|
||||
## auth_ldap_starttls
|
||||
|
||||
```
|
||||
false
|
||||
```
|
||||
|
||||
## auth_ldap_server
|
||||
|
||||
```
|
||||
[lldap server ip]
|
||||
```
|
||||
|
||||
## auth_ldap_port
|
||||
|
||||
```
|
||||
3890
|
||||
```
|
||||
|
||||
## auth_ldap_suffix
|
||||
|
||||
```
|
||||
,ou=people,dc=example,dc=com
|
||||
```
|
||||
|
||||
Not sure if the case of people actually matters.
|
||||
Make sure you keep the initial comma.
|
||||
|
||||
## auth_ldap_groupbase
|
||||
|
||||
```
|
||||
ou=groups,dc=example,dc=com
|
||||
```
|
||||
|
||||
## auth_mechanism
|
||||
|
||||
```
|
||||
ldap
|
||||
```
|
||||
Be careful with this as you will lock yourself out if ldap does not work correctly. Set back to 'mysql' to turn ldap off.
|
||||
|
||||
### auth_ldap_require_groupmembership
|
||||
|
||||
```
|
||||
false
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
Use the test script to make sure it works.
|
||||
```
|
||||
./script/auth_test.php -u <user>
|
||||
```
|
||||
Make sure the level is correctly populated. Should look like this:
|
||||
|
||||
```
|
||||
librenms:/opt/librenms# ./scripts/auth_test.php -uadmin
|
||||
Authentication Method: ldap
|
||||
Password:
|
||||
Authenticate user admin:
|
||||
AUTH SUCCESS
|
||||
|
||||
User (admin):
|
||||
username => admin
|
||||
realname => Administrator
|
||||
user_id => admin
|
||||
email => admin@example.com
|
||||
level => 10
|
||||
Groups: cn=nms_admin,ou=groups,dc=example,dc=com
|
||||
```
|
||||
|
||||
## Setting variables
|
||||
|
||||
### Web UI
|
||||
|
||||
You can set all the varibles in the web UI in: Settings -> Authentication -> LDAP Settings
|
||||
|
||||
### Command line
|
||||
|
||||
You can use the lnms command to *get* config options like this:
|
||||
```
|
||||
lnms config:get auth_ldap_uid_attribute
|
||||
```
|
||||
|
||||
You can use the lnms command to *set* config options like this:
|
||||
```
|
||||
lnms config:set auth_ldap_uid_attribute uid
|
||||
```
|
||||
|
||||
Read more [here](https://docs.librenms.org/Support/Configuration/)
|
||||
|
||||
### Pre load configuration for Docker
|
||||
|
||||
You can create a file named: /data/config/ldap.yaml and place your variables in there.
|
||||
|
||||
```
|
||||
librenms:/opt/librenms# cat /data/config/auth.yaml
|
||||
auth_mechanism: ldap
|
||||
|
||||
auth_ldap_server: 172.17.0.1
|
||||
auth_ldap_port: 3890
|
||||
auth_ldap_version: 3
|
||||
auth_ldap_suffix: ,ou=people,dc=example,dc=com
|
||||
auth_ldap_groupbase: ou=groups,dc=example,dc=com
|
||||
|
||||
auth_ldap_prefix: uid=
|
||||
auth_ldap_starttls: False
|
||||
auth_ldap_attr: {"uid": "uid"}
|
||||
auth_ldap_uid_attribute: uid
|
||||
auth_ldap_groups: {"nms_admin": {"level": 10}}
|
||||
auth_ldap_groupmemberattr: member
|
||||
auth_ldap_require_groupmembership: False
|
||||
auth_ldap_debug: False
|
||||
|
||||
auth_ldap_group: cn=groupname,ou=groups,dc=example,dc=com
|
||||
auth_ldap_groupmembertype: username
|
||||
auth_ldap_timeout: 5
|
||||
auth_ldap_emailattr: mail
|
||||
auth_ldap_userdn: True
|
||||
auth_ldap_userlist_filter:
|
||||
auth_ldap_wildcard_ou: False
|
||||
```
|
||||
|
||||
Read more [here](https://github.com/librenms/docker#configuration-management)
|
||||
|
||||
## Issue with current LibreNMS
|
||||
|
||||
The current version (23.7.0 at the time of writing) does not support lldap. A fix has been accepted to LibreNMS so the next version should just work.
|
||||
|
||||
[Link to the commit](https://github.com/librenms/librenms/commit/a71ca98fac1a75753b102be8b3644c4c3ee1a624)
|
||||
|
||||
If you want to apply the fix manually, run git apply with this patch.
|
||||
|
||||
```
|
||||
diff --git a/LibreNMS/Authentication/LdapAuthorizer.php b/LibreNMS/Authentication/LdapAuthorizer.php
|
||||
index 5459759ab..037a7382b 100644
|
||||
--- a/LibreNMS/Authentication/LdapAuthorizer.php
|
||||
+++ b/LibreNMS/Authentication/LdapAuthorizer.php
|
||||
@@ -233,7 +233,7 @@ class LdapAuthorizer extends AuthorizerBase
|
||||
$entries = ldap_get_entries($connection, $search);
|
||||
foreach ($entries as $entry) {
|
||||
$user = $this->ldapToUser($entry);
|
||||
- if ((int) $user['user_id'] !== (int) $user_id) {
|
||||
+ if ($user['user_id'] != $user_id) {
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -360,7 +360,7 @@ class LdapAuthorizer extends AuthorizerBase
|
||||
return [
|
||||
'username' => $entry['uid'][0],
|
||||
'realname' => $entry['cn'][0],
|
||||
- 'user_id' => (int) $entry[$uid_attr][0],
|
||||
+ 'user_id' => $entry[$uid_attr][0],
|
||||
'email' => $entry[Config::get('auth_ldap_emailattr', 'mail')][0],
|
||||
'level' => $this->getUserlevel($entry['uid'][0]),
|
||||
];
|
||||
```
|
||||
28
example_configs/mealie.md
Normal file
28
example_configs/mealie.md
Normal file
@@ -0,0 +1,28 @@
|
||||
# Mealie
|
||||
|
||||
Configuration is done solely with environmental variables in the mealie-api docker-compose config:
|
||||
|
||||
## Note
|
||||
[LDAP integration in Mealie currently only works with the nightly branch](https://github.com/hay-kot/mealie/issues/2402#issuecomment-1560176528), so `hkotel/mealie:api-nightly` and `hkotel/mealie:frontend-nightly` rather than the current "stable" release of `v1.0.0beta-5`
|
||||
|
||||
## Configuration
|
||||
|
||||
The following config should let you login with either members of the `mealie` group as a user, or as an admin user with members of the `mealie-admin` group.
|
||||
|
||||
Mealie first checks credentials in the `mealie` group to authenticate, then checks for the presence of the user in the `mealie-admin` group and elevates that account to admin status if present, therefore for any account to be an admin account it must belong in both the `mealie` group and the `mealie-admin` group.
|
||||
|
||||
It is recommended to create a `readonly_user` and add them to the `lldap_strict_readonly` group to bind with.
|
||||
|
||||
```yaml
|
||||
- LDAP_AUTH_ENABLED=true
|
||||
- LDAP_SERVER_URL=ldap://lldap:3890
|
||||
- LDAP_TLS_INSECURE=true ## Only required for LDAPS with a self-signed certificate
|
||||
- LDAP_BASE_DN=ou=people,dc=example,dc=com
|
||||
- LDAP_USER_FILTER=(memberof=cn=mealie,ou=groups,dc=example,dc=com)
|
||||
- LDAP_ADMIN_FILTER=(memberof=cn=mealie-admin,ou=groups,dc=example,dc=com)
|
||||
- LDAP_QUERY_BIND=cn=readonly_user,ou=people,dc=example,dc=com
|
||||
- LDAP_QUERY_PASSWORD=READONLY_USER_PASSWORD
|
||||
- LDAP_ID_ATTRIBUTE=uid
|
||||
- LDAP_NAME_ATTRIBUTE=displayName
|
||||
- LDAP_MAIL_ATTRIBUTE=mail
|
||||
```
|
||||
37
example_configs/minio.md
Normal file
37
example_configs/minio.md
Normal file
@@ -0,0 +1,37 @@
|
||||
# MinIO Configuration
|
||||
|
||||
MinIO is a High-Performance Object Storage released under GNU Affero General Public License v3. 0. It is API compatible with the Amazon S3 cloud storage service. This example assists with basic LDAP configuration and policy attachment.
|
||||
|
||||
## LDAP Config
|
||||
|
||||
### Navigation
|
||||
|
||||
- Login to the WebUI as a consoleAdmin user
|
||||
- Navigate to `Administrator > Identity > LDAP`
|
||||
- Click `Edit Configuration`
|
||||
|
||||
### Configuration Options
|
||||
|
||||
- Server Insecure: Enabled
|
||||
- Server Address: Hostname or IP for your LLDAP host
|
||||
- Lookup Bind DN: `uid=admin,ou=people,dc=example,dc=com`
|
||||
- It is recommended that you create a separate user account (e.g, `bind_user`) instead of `admin` for sharing Bind credentials with other services. The `bind_user` should be a member of the `lldap_strict_readonly` group to limit access to your LDAP configuration in LLDAP.
|
||||
- Lookup Bind Password: The password for the user referenced above
|
||||
- User DN Search Base: `ou=people,dc=example,dc=com`
|
||||
- User DN Search Filter: `(&(uid=%s)(memberOf=cn=minio_admin,ou=groups,dc=example,dc=com))`
|
||||
- This search filter will only allow users that are members of the `minio_admin` group to authenticate. To allow all lldap users, this filter can be used instead `(uid=%s)`
|
||||
- Group Search Base DN: `ou=groups,dc=example,dc=com`
|
||||
- Group Search Filter: `(member=%d)`
|
||||
|
||||
### Enable LDAP
|
||||
|
||||
> Note there appears to be a bug in some versions of MinIO where LDAP is enabled and working, however the configuration UI reports that it is not enabled.
|
||||
|
||||
Now, you can enable LDAP authentication by clicking the `Enable LDAP` button, a restart of the service or container is needed. With this configuration, LLDAP users will be able to log in to MinIO now. However they will not be able to do anything, as we need to attach policies giving permissions to users.
|
||||
|
||||
## Policy Attachment
|
||||
|
||||
Creating MinIO policies is outside of the scope for this document, but it is well documented by MinIO [here](https://min.io/docs/minio/linux/administration/identity-access-management/policy-based-access-control.html). Policies are written in JSON, are extremely flexible, and can be configured to be very granular. In this example we will be using one of the built-in Policies, `consoleAdmin`. We will be applying these policies with the `mc` command line utility.
|
||||
|
||||
- Alias your MinIO instance: `mc alias set myMinIO http://<your-minio-address>:<your-minio-api-port> admin <your-admin-password>`
|
||||
- Attach a policy to your LDAP group: `mc admin policy attach myMinIO consoleAdmin --group='cn=minio_admin,ou=groups,dc=example,dc=com'`
|
||||
@@ -70,7 +70,7 @@ _The first two can be any string you'd like to identify the connection with. The
|
||||
|
||||
* *_Do not_* use commas in the Nextcloud Social Login app scope! This caused many issues for me.
|
||||
* Be sure you update your Authelia `configuration.yml`. Specifically, the line: `redirect_uris`. The new URL should be
|
||||
`https://auth.example.com/index.php/apps/sociallogin/custom_oidc/Authelia`.
|
||||
`https://nextcloud.example.com/apps/sociallogin/custom_oidc/Authelia`, in some cases the URL also contains the index.php file and has to look like this `https://nextcloud.example.com/index.php/apps/sociallogin/custom_oidc/Authelia`. Check if your nextcloud has index.php in it's URL because if it has this won't work without and if it hasn't the link with index.php won't work.
|
||||
* The final field in the URL (Authelia) needs to be the same value you used in the Social Login "Internal Name" field.
|
||||
* If you've setup LLDAP correctly in nextcloud, the last dropdown for _Default Group_ should show you the `nextcloud_users` group you setup in LLDAP.
|
||||
|
||||
|
||||
39
example_configs/powerdns_admin.md
Normal file
39
example_configs/powerdns_admin.md
Normal file
@@ -0,0 +1,39 @@
|
||||
# Configuration for PowerDNS Admin
|
||||
|
||||
## Navigate
|
||||
|
||||
- Login to PowerDNS Admin
|
||||
- Navigate to: `Administration > Settings > Authentication`
|
||||
- Select the `LDAP` tab of the `Authentication Settings`
|
||||
|
||||
## LDAP Config
|
||||
|
||||
- Enable LDAP Authentication: Checked
|
||||
- Type: OpenLDAP
|
||||
|
||||
### Administrator Info
|
||||
|
||||
- LDAP URI: `ldap://<your-lldap-ip-or-hostname>:3890`
|
||||
- LDAP Base DN: `ou=people,dc=example,dc=com`
|
||||
- LDAP admin username: `uid=admin,ou=people,dc=example,dc=com`
|
||||
- It is recommended that you create a separate user account (e.g, `bind_user`) instead of `admin` for sharing Bind credentials with other services. The `bind_user` should be a member of the `lldap_strict_readonly` group to limit access to your LDAP configuration in LLDAP.
|
||||
- LDAP admin password: password of the user specified above
|
||||
|
||||
### Filters
|
||||
|
||||
- Basic filter: `(objectClass=person)`
|
||||
- Username field: `uid`
|
||||
- Group filter: `(objectClass=groupOfUniqueNames)`
|
||||
- Group name field: `member`
|
||||
|
||||
### Group Security (Optional)
|
||||
|
||||
> If Group Security is disabled, all users authenticated via LDAP will be given the "User" role.
|
||||
|
||||
Group Security is an optional configuration for LLDAP users. It provides a simple 1:1 mapping between LDAP groups, and PowerDNS roles.
|
||||
|
||||
- Status: On
|
||||
- Admin group: `cn=dns_admin,ou=groups,dc=example,dc=com`
|
||||
- Operator group: `cn=dns_operator,ou=groups,dc=example,dc=com`
|
||||
- User group: `cn=dns_user,ou=groups,dc=example,dc=com`
|
||||
|
||||
83
example_configs/proxmox.md
Normal file
83
example_configs/proxmox.md
Normal file
@@ -0,0 +1,83 @@
|
||||
# Proxmox VE Example
|
||||
|
||||
Proxmox Virtual Environment is a hyper-converged infrastructure open-source software. It is a hosted hypervisor that can run operating systems including Linux and Windows on x64 hardware. In this example we will set up user and group synchronization, with two example groups `proxmox_user` and `proxmox_admin`. This example was made using Proxmox VE 8.0.3.
|
||||
|
||||
## Navigation
|
||||
|
||||
- From the `Server View` open the `Datacenter` page
|
||||
- Then in this page, open the `Permissions > Realms` menu
|
||||
- In this menu, select `Add > LDAP Server`
|
||||
|
||||
## General Options
|
||||
|
||||
- Realm: The internal proxmox name for this authentication method
|
||||
- Base Domain Name: `dc=example,dc=com`
|
||||
- User Attribute Name: `uid`
|
||||
- Server: Your LLDAP hostname or IP
|
||||
- Port: `3890`
|
||||
- SSL: Leave unchecked unless you're using LDAPS
|
||||
- Comment: This field will be exposed as the "name" in the login page
|
||||
|
||||
## Sync Options
|
||||
|
||||
- Bind User: `uid=admin,ou=people,dc=example,dc=com`
|
||||
- It is recommended that you create a separate user account (e.g, `bind_user`) instead of `admin` for sharing Bind credentials with other services. The `bind_user` should be a member of the `lldap_strict_readonly` group to limit access to your LDAP configuration in LLDAP.
|
||||
- Bind Password: password of the user specified above
|
||||
- E-Mail Attribute: `mail`
|
||||
- Groupname attr: `cn`
|
||||
- User Filter: `(&(objectClass=person)(|(memberof=cn=proxmox_user,ou=groups,dc=example,dc=com)(memberof=cn=proxmox_admin,ou=groups,dc=example,dc=com)))`
|
||||
- This filter will only copy users that are members of the `proxmox_user` or `proxmox_admin` groups. If you want to enable all users in lldap, this filter can be used: `(objectClass=person)`
|
||||
- Group Filter: `(&(objectClass=groupofuniquenames)(|(cn=proxmox_user)(cn=proxmox_admin)))`
|
||||
- This filter will only copy the `proxmox_user` or `proxmox_admin` groups explicitly. If you want to sync all groups, this filter can be used: `(objectClass=groupofnames)`
|
||||
- Default Sync Options:
|
||||
- Scope: `Users and Groups`
|
||||
- Remove Vanished Options
|
||||
- Entry: Checked
|
||||
- Properties: Checked
|
||||
|
||||
## Synchronizing
|
||||
|
||||
Proxmox operates LDAP authentication by synchronizing with your lldap server to a local database. This sync can be triggered manually, and on a scheduled basis. Proxmox also offers a preview feature, which will report any changes to the local DB from a sync, without applying the changes. It is highly recommended to run a preview on your first synchronization after making any filter changes, to ensure synchronization is happening as expected.
|
||||
|
||||
### First Sync
|
||||
|
||||
- With the options saved, and from the `Permissions > Realms` page, select the LDAP realm you just created and click `Sync`
|
||||
- At the sync dialog, click the Preview button, and carefully check the output to ensure all the users and groups you expect are seen, and that nothing is being removed unexpectedly.
|
||||
- Once the preview output is matching what we expect, we can click the Sync button, on the `Realm Sync` dialog for the ldap realm we created.
|
||||
|
||||
### Scheduled Sync (Optional)
|
||||
|
||||
- Once we are confident that LDAP synchronization is working as expected, this can be scheduled as a job from the `Permissions > Realms` page.
|
||||
- On the second half of the page, click `Add` under `Realm Sync Jobs`
|
||||
- Set a schedule for this job and click `Create`
|
||||
|
||||
## ACLs
|
||||
|
||||
Once you have users and groups synchronized from lldap, it is necessary to grant some permissions to these users or groups so that they are able to use Proxmox. Proxmox handles this with a filesystem-like tree structure, and "roles" which are collections of permissions. In our basic example, we will grant the built-in `Administrator` role to our `proxmox_admin` group over the entire system. Then we will also grant the `proxmox_user` group several roles with different paths so they can clone and create VMs within a specific resource pool (`UserVMs`), but are otherwise restricted from editing or deleting other resources.
|
||||
|
||||
> Note that Proxmox appends the realm name to groups when syncing, so if you named your realm `lldap` the groups as synced will be `proxmox_user-lldap` and `proxmox_admin-lldap`
|
||||
|
||||
### Administrator
|
||||
|
||||
- From the Datacenter pane, select the `Permissions` menu page.
|
||||
- Click `Add > Group Permission`
|
||||
- Path: Type or select `/`
|
||||
- Group: Type or select the admin group that has synchronized (`proxmox_admin-lldap` in our example)
|
||||
- Role: `Administrator`
|
||||
- Finish by clicking the `Add` button and this access should now be granted
|
||||
|
||||
### User Role
|
||||
|
||||
> This example assumes we have created Resource Pools named `UserVMs` and `Templates`
|
||||
|
||||
- From the Datacenter pane, select the `Permissions` menu page.
|
||||
- We will be adding six rules in total, for each one clicking `Add > Group Permission`
|
||||
- Path: `/pool/UserVMs`, Group: `proxmox_user-lldap`, Role: PVEVMAdmin
|
||||
- Path: `/pool/UserVMs`, Group: `proxmox_user-lldap`, Role: PVEPoolAdmin
|
||||
- Path: `/pool/Templates`, Group: `proxmox_user-lldap`, Role: PVEPoolUser
|
||||
- Path: `/pool/Templates`, Group: `proxmox_user-lldap`, Role: PVETemplateUser
|
||||
- The following two rules are based on a default setup of Proxmox, and may need to be updated based on your networking and storage configuration
|
||||
- Path: `/sdn/zones/localnetwork`, Group: `proxmox_user-lldap`, Role: PVESDNUser
|
||||
- Path: `/storage/local-lvm`, Group: `proxmox_user-lldap`, Role: PVEDatastoreUser
|
||||
|
||||
That completes our basic example. The ACL rules in Proxmox are very flexible though, and custom roles can be created as well. The Proxmox documentation on [User Management](https://pve.proxmox.com/wiki/User_Management) goes into more depth if you wish to write a policy that better fits your use case.
|
||||
57
example_configs/squid.md
Normal file
57
example_configs/squid.md
Normal file
@@ -0,0 +1,57 @@
|
||||
# Squid
|
||||
|
||||
[Squid](http://www.squid-cache.org/) is a caching HTTP/HTTPS proxy.
|
||||
|
||||
This guide will show you how to configure it to allow any user of the group `proxy` to use the proxy server.
|
||||
|
||||
The configuration file `/etc/squid/squid.conf`
|
||||
```
|
||||
auth_param basic program /usr/lib/squid/basic_ldap_auth -b "dc=example,dc=com" -D "uid=admin,ou=people,dc=example,dc=com" -W /etc/squid/ldap_password -f "(&(memberOf=uid=proxy,ou=groups,dc=example,dc=com)(uid=%s))" -H ldap://IP_OF_LLDAP_SERVER:3890
|
||||
acl localnet src 0.0.0.1-0.255.255.255 # RFC 1122 "this" network (LAN)
|
||||
acl localnet src 10.0.0.0/8 # RFC 1918 local private network (LAN)
|
||||
acl localnet src 100.64.0.0/10 # RFC 6598 shared address space (CGN)
|
||||
acl localnet src 169.254.0.0/16 # RFC 3927 link-local (directly plugged) machines
|
||||
acl localnet src 172.16.0.0/12 # RFC 1918 local private network (LAN)
|
||||
acl localnet src 192.168.0.0/16 # RFC 1918 local private network (LAN)
|
||||
acl localnet src fc00::/7 # RFC 4193 local private network range
|
||||
acl localnet src fe80::/10 # RFC 4291 link-local (directly plugged) machines
|
||||
acl SSL_ports port 443
|
||||
acl Safe_ports port 80 # http
|
||||
acl Safe_ports port 21 # ftp
|
||||
acl Safe_ports port 443 # https
|
||||
acl Safe_ports port 70 # gopher
|
||||
acl Safe_ports port 210 # wais
|
||||
acl Safe_ports port 1025-65535 # unregistered ports
|
||||
acl Safe_ports port 280 # http-mgmt
|
||||
acl Safe_ports port 488 # gss-http
|
||||
acl Safe_ports port 591 # filemaker
|
||||
acl Safe_ports port 777 # multiling http
|
||||
http_access deny !Safe_ports
|
||||
http_access deny CONNECT !SSL_ports
|
||||
http_access allow localhost manager
|
||||
http_access deny manager
|
||||
include /etc/squid/conf.d/*.conf
|
||||
http_access allow localhost
|
||||
acl ldap-auth proxy_auth REQUIRED
|
||||
http_access allow ldap-auth
|
||||
# http_access deny all
|
||||
http_port 3128
|
||||
coredump_dir /var/spool/squid
|
||||
refresh_pattern ^ftp: 1440 20% 10080
|
||||
refresh_pattern ^gopher: 1440 0% 1440
|
||||
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
|
||||
refresh_pattern \/(Packages|Sources)(|\.bz2|\.gz|\.xz)$ 0 0% 0 refresh-ims
|
||||
refresh_pattern \/Release(|\.gpg)$ 0 0% 0 refresh-ims
|
||||
refresh_pattern \/InRelease$ 0 0% 0 refresh-ims
|
||||
refresh_pattern \/(Translation-.*)(|\.bz2|\.gz|\.xz)$ 0 0% 0 refresh-ims
|
||||
refresh_pattern . 0 20% 4320
|
||||
```
|
||||
The password for the binduser is stored in `/etc/squid/ldap_password` e.g.
|
||||
```
|
||||
PASSWORD_FOR_BINDUSER
|
||||
```
|
||||
|
||||
After you restart squid with `systemctl restart squid` check it is working with
|
||||
```
|
||||
curl -O -L "https://www.redhat.com/index.html" -x "user_name:password@proxy.example.com:3128"
|
||||
```
|
||||
37
example_configs/tandoor_recipes.md
Normal file
37
example_configs/tandoor_recipes.md
Normal file
@@ -0,0 +1,37 @@
|
||||
# Tandoor Recipes LDAP configuration
|
||||
|
||||
## LDAP settings are defined by environmental variables as defined in [Tandoor's documentation](https://docs.tandoor.dev/features/authentication/#ldap)
|
||||
|
||||
### #Required#
|
||||
It is recommended to have a read-only account to bind to
|
||||
```
|
||||
LDAP_AUTH=1
|
||||
AUTH_LDAP_SERVER_URI=ldap://lldap:3890/
|
||||
AUTH_LDAP_BIND_DN=uid=ro_admin,ou=people,DC=example,DC=com
|
||||
AUTH_LDAP_BIND_PASSWORD=CHANGEME
|
||||
AUTH_LDAP_USER_SEARCH_BASE_DN=ou=people,DC=example,DC=com
|
||||
```
|
||||
|
||||
### #Optional#
|
||||
|
||||
By default it authenticates everybody identified by the search base DN, this allows you to pull certain users from the ```tandoor_users``` group
|
||||
```
|
||||
AUTH_LDAP_USER_SEARCH_FILTER_STR=(&(&(objectclass=person)(memberOf=cn=tandoor_users,ou=groups,dc=example,dc=com))(uid=%(user)s))
|
||||
```
|
||||
|
||||
Map Tandoor user fields with their LLDAP counterparts
|
||||
```
|
||||
AUTH_LDAP_USER_ATTR_MAP={'first_name': 'givenName', 'last_name': 'sn', 'email': 'mail'}
|
||||
```
|
||||
|
||||
Set whether or not to always update user fields at login and how many seconds for a timeout
|
||||
```
|
||||
AUTH_LDAP_ALWAYS_UPDATE_USER=1
|
||||
AUTH_LDAP_CACHE_TIMEOUT=3600
|
||||
```
|
||||
|
||||
If you use secure LDAP
|
||||
```
|
||||
AUTH_LDAP_START_TLS=1
|
||||
AUTH_LDAP_TLS_CACERTFILE=/etc/ssl/certs/own-ca.pem
|
||||
```
|
||||
43
example_configs/thelounge.md
Normal file
43
example_configs/thelounge.md
Normal file
@@ -0,0 +1,43 @@
|
||||
# Basic LDAP auth for a The Lounge IRC web-client
|
||||
|
||||
[Main documentation here.](https://thelounge.chat/docs/configuration#ldap-support)
|
||||
|
||||
## Simple Config:
|
||||
|
||||
In this config, The Lounge will use the credentials provided in web ui to authenticate with lldap. It'll allow access if authentication was successful.
|
||||
|
||||
```
|
||||
ldap: {
|
||||
enable: true,
|
||||
url: "ldap://localhost:389",
|
||||
tlsOptions: {},
|
||||
primaryKey: "uid",
|
||||
baseDN: "ou=people,dc=example,dc=com",
|
||||
},
|
||||
```
|
||||
|
||||
|
||||
## Advanced Config:
|
||||
|
||||
`rootDN` is similar to bind DN in other applications. It is used in combination with `rootPassword` to query lldap. `ldap-viewer` user in `lldap` is a member of the group `lldap_strict_readonly` group. This gives `ldap-viewer` user permission to query `lldap`.
|
||||
|
||||
|
||||
With the `filter`, You can limit The Lounge access to users who are a member of the group `thelounge`.
|
||||
|
||||
|
||||
```
|
||||
ldap: {
|
||||
enable: true,
|
||||
url: "ldap://localhost:389",
|
||||
tlsOptions: {},
|
||||
primaryKey: "uid",
|
||||
searchDN: {
|
||||
rootDN: "uid=ldap-viewer,ou=people,dc=example,dc=com",
|
||||
rootPassword: ""
|
||||
filter: "(memberOf=cn=thelounge,ou=groups,dc=example,dc=com)",
|
||||
base: "dc=example,dc=com",
|
||||
scope: "sub",
|
||||
},
|
||||
},
|
||||
```
|
||||
|
||||
56
example_configs/zabbix-web.md
Normal file
56
example_configs/zabbix-web.md
Normal file
@@ -0,0 +1,56 @@
|
||||
# Zabbix Web Configuration
|
||||
|
||||
This example is for the Zabbix Web interface version 6.0, which is the supported LTS version as of August 2023. Later versions have additional options.
|
||||
|
||||
For the associated 6.0 documentation see [here](https://www.zabbix.com/documentation/6.0/en/manual/web_interface/frontend_sections/administration/authentication) and for the current manual see [here](https://www.zabbix.com/documentation/current/en/manual).
|
||||
|
||||
***Note that an LDAP user must exist in Zabbix Web as well, however its Zabbix password will not be used.*** When creating the user in Zabbix, the user should also be added to your desired Zabbix roles/groups.
|
||||
|
||||
## Configure LDAP Settings
|
||||
|
||||
- Log in to the web interface as an admin
|
||||
- Navigate to `Administration > Authentication > LDAP Settings`
|
||||
|
||||
### Enable LDAP authentication
|
||||
|
||||
Checked
|
||||
|
||||
### LDAP host
|
||||
|
||||
URI of your LLDAP host. Example: `ldap://ldap.example.com:3890` or `ldaps://ldap.example.com:6360` for TLS.
|
||||
|
||||
### Port
|
||||
|
||||
Not used when using a full LDAP URI as above, but feel free to put `3890` or `6360` for TLS.
|
||||
|
||||
### Base DN
|
||||
|
||||
Your LLDAP_LDAP_BASE. Example: `dc=example,dc=com`
|
||||
|
||||
### Search attribute
|
||||
|
||||
`uid`
|
||||
|
||||
### Case-sensitive login
|
||||
|
||||
Checked
|
||||
|
||||
### Bind DN
|
||||
|
||||
`uid=admin,ou=people,dc=example,dc=com`
|
||||
|
||||
Alternately, it is recommended that you create a separate user account (e.g, `bind_user`) instead of `admin` for sharing Bind credentials with other services. The `bind_user` should be a member of the `lldap_strict_readonly` group to limit access to your LDAP configuration in LLDAP.
|
||||
|
||||
### Bind password
|
||||
|
||||
Password for the above bind DN user.
|
||||
|
||||
### Test authentication
|
||||
|
||||
The test authentication `Login` and `User password` must be used to check the connection and whether an LDAP user can be successfully authenticated. Zabbix will not activate LDAP authentication if it is unable to authenticate the test user.
|
||||
|
||||
## Enable LDAP in Zabbix Web
|
||||
|
||||
- Navigate to `Administration > Authentication > Authentication` (the first tab)
|
||||
- Set "Default authentication" to "LDAP"
|
||||
- Click "Update"
|
||||
143
example_configs/zulip.md
Normal file
143
example_configs/zulip.md
Normal file
@@ -0,0 +1,143 @@
|
||||
# Configuration for Zulip
|
||||
|
||||
Zulip combines the immediacy of real-time chat with an email threading model.
|
||||
|
||||
Their ldap-documentation is here: [zulip.readthedocs.io](https://zulip.readthedocs.io/en/stable/production/authentication-methods.html#ldap-including-active-directory)
|
||||
|
||||
Zulip has two installation methods, either by running the recommended installer or by docker/podman compose.
|
||||
How the service is configured differs depending on the installation method, so keep in mind you will only need one of the following examples.
|
||||
|
||||
> Important info
|
||||
> The available/configured userdata will be automatically imported at the first login.
|
||||
> If you want to import it before the user logs in for the first time or
|
||||
> if you want to keep the data in sync with LLDAP you need to trigger the import by hand (or via cronjob).
|
||||
> `/home/zulip/deployments/current/manage.py sync_ldap_user_data`
|
||||
|
||||
## Container based configuration
|
||||
The following configuration takes place in the environment section of your compose-file.
|
||||
|
||||
1) Enable the LDAP authentication backend
|
||||
Find the line`ZULIP_AUTH_BACKENDS: "EmailAuthBackend"` and change it to `ZULIP_AUTH_BACKENDS: "ZulipLDAPAuthBackend,EmailAuthBackend"`.
|
||||
|
||||
2) Configure how to connect with LLDAP
|
||||
The user specified in `SETTING_AUTH_LDAP_BIND_DN` is used to query data from LLDAP.
|
||||
Zulip is only able to authenticate users and read data via LDAP; it is not able to write data or change the user's password.
|
||||
Because of this limitation we will use the group `lldap_strict_readonly` for this user.
|
||||
Add the following lines to your configuration and change the values according to your setup.
|
||||
```
|
||||
SETTING_AUTH_LDAP_SERVER_URI: "ldap://lldap:3890"
|
||||
SETTING_AUTH_LDAP_BIND_DN: "uid=zulip,ou=people,dc=example,dc=com"
|
||||
SECRETS_auth_ldap_bind_password: "superSECURE_Pa55word"
|
||||
```
|
||||
|
||||
3) Configure how to search for existing users
|
||||
Add the following lines to your configuration and change the values according to your setup.
|
||||
```
|
||||
SETTING_AUTH_LDAP_USER_SEARCH: >
|
||||
LDAPSearch("ou=people,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(uid=%(user)s)")
|
||||
SETTING_LDAP_EMAIL_ATTR: mail
|
||||
SETTING_AUTH_LDAP_REVERSE_EMAIL_SEARCH: >
|
||||
LDAPSearch("ou=people,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(mail=%(email)s)")
|
||||
SETTING_AUTH_LDAP_USERNAME_ATTR: "uid"
|
||||
```
|
||||
|
||||
4) Configure the user-data mapping
|
||||
This step is optional, the sample below shows the maximum of available options, you can use all of them or none.
|
||||
Add the following lines to your configuration and remove the fields you don't want to be synced.
|
||||
The field `"full_name": "cn"` is mandatory.
|
||||
```
|
||||
SETTING_AUTH_LDAP_USER_ATTR_MAP: >
|
||||
{"full_name": "cn","first_name": "givenName","last_name": "sn","avatar": "jpegPhoto"}
|
||||
```
|
||||
|
||||
5) Configure which groups are allowed to authenticate
|
||||
This step is optional, if you do not specify anything here all users from your LLDAP server will be able to login.
|
||||
This example will grant access to all users who are a member of `zulip_users`.
|
||||
Add the following lines to your configuration and change the values according to your setup.
|
||||
```
|
||||
ZULIP_CUSTOM_SETTINGS: "import django_auth_ldap"
|
||||
SETTING_AUTH_LDAP_GROUP_TYPE: "django_auth_ldap.config.GroupOfUniqueNamesType(name_attr='cn')"
|
||||
SETTING_AUTH_LDAP_REQUIRE_GROUP: "cn=zulip_users,ou=groups,dc=example,dc=com"
|
||||
SETTING_AUTH_LDAP_GROUP_SEARCH: >
|
||||
LDAPSearch("ou=groups,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(objectClass=GroupOfUniqueNames)")
|
||||
```
|
||||
|
||||
6) Disallow local changes after importing userdata
|
||||
This step is optional; you may want to disallow users from changing their name and avatar if you import this data via LDAP.
|
||||
Add the following lines to your configuration and change the values according to your setup.
|
||||
```
|
||||
SETTING_NAME_CHANGES_DISABLED: True
|
||||
SETTING_AVATAR_CHANGES_DISABLED: True
|
||||
```
|
||||
> Important Info
|
||||
> Zulip will not write the user profile back to your ldap server.
|
||||
> If the user changes their profile in Zulip, those changes will be overwritten when the next synchronization with LLDAP is triggered.
|
||||
> Allow changes to the user profile only if you do not plan to synchronize it with LLDAP regularly.
|
||||
|
||||
|
||||
|
||||
## Installer based configuration
|
||||
The following configuration takes place in the configuration-file `/etc/zulip/settings.py`.
|
||||
|
||||
1) Enable the LDAP authentication backend
|
||||
Find the line `AUTHENTICATION_BACKENDS` and uncomment `"zproject.backends.ZulipLDAPAuthBackend"`.
|
||||
|
||||
2) Configure how to connect with LLDAP
|
||||
The user specified in `AUTH_LDAP_BIND_DN` is used to query data from LLDAP.
|
||||
Zulip is only able to authenticate users and read data via LDAP; it is not able to write data or change the user's password.
|
||||
Because of this limitation we will use the group `lldap_strict_readonly` for this user.
|
||||
Uncomment the following lines in your configuration and change the values according to your setup.
|
||||
```
|
||||
AUTH_LDAP_SERVER_URI = "ldap://lldap:3890"
|
||||
AUTH_LDAP_BIND_DN = "uid=zulip,ou=people,dc=example,dc=com"
|
||||
```
|
||||
|
||||
The password corresponding to AUTH_LDAP_BIND_DN goes in `/etc/zulip/zulip-secrets.conf`.
|
||||
Add a single new line to that file like below.
|
||||
```
|
||||
auth_ldap_bind_password = superSECURE_Pa55word
|
||||
```
|
||||
|
||||
3) Configure how to search for existing users
|
||||
Uncomment the following lines in your configuration and change the values according to your setup.
|
||||
```
|
||||
AUTH_LDAP_USER_SEARCH = LDAPSearch("ou=people,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(uid=%(user)s)")
|
||||
LDAP_EMAIL_ATTR = mail
|
||||
AUTH_LDAP_REVERSE_EMAIL_SEARCH = LDAPSearch("ou=people,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(mail=%(email)s)")
|
||||
AUTH_LDAP_USERNAME_ATTR = "uid"
|
||||
```
|
||||
|
||||
4) Configure the user-data mapping
|
||||
This step is optional, the sample below shows the maximum of available options, you can use all of them or none.
|
||||
Find the line `AUTH_LDAP_USER_ATTR_MAP`, then uncomment the values you want to map and change the values according to your setup.
|
||||
```
|
||||
AUTH_LDAP_USER_ATTR_MAP = {
|
||||
"full_name": "cn",
|
||||
"first_name": "givenName",
|
||||
"last_name": "sn",
|
||||
"avatar": "jpegPhoto",
|
||||
}
|
||||
```
|
||||
|
||||
5) Configure which groups are allowed to authenticate
|
||||
This step is optional, if you do not specify anything here all users from your LLDAP server will be able to login.
|
||||
This example will grant access to all users who are a member of `zulip_users`.
|
||||
Add the following lines to your configuration and change the values according to your setup.
|
||||
```
|
||||
import django_auth_ldap
|
||||
AUTH_LDAP_GROUP_TYPE = "django_auth_ldap.config.GroupOfUniqueNamesType(name_attr='cn')"
|
||||
AUTH_LDAP_REQUIRE_GROUP = "cn=zulip_users,ou=groups,dc=example,dc=com"
|
||||
AUTH_LDAP_GROUP_SEARCH = LDAPSearch("ou=groups,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(objectClass=GroupOfUniqueNames)")
|
||||
```
|
||||
|
||||
6) Disallow local changes after importing userdata
|
||||
This step is optional; you may want to disallow users from changing their name and avatar if you import this data via LDAP.
|
||||
Uncomment the following lines in your configuration and change the values according to your setup.
|
||||
```
|
||||
NAME_CHANGES_DISABLED: True
|
||||
AVATAR_CHANGES_DISABLED: True
|
||||
```
|
||||
> Important Info
|
||||
> Zulip will not write the user profile back to your ldap server.
|
||||
> If the user changes their profile in Zulip, those changes will be overwritten when the next synchronization with LLDAP is triggered.
|
||||
> Allow changes to the user profile only if you do not plan to synchronize it with LLDAP regularly.
|
||||
@@ -9,16 +9,16 @@ repository = "https://github.com/lldap/lldap"
|
||||
version = "0.4.2"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
anyhow = "*"
|
||||
base64 = "0.13"
|
||||
rand = "0.8"
|
||||
requestty = "0.4.1"
|
||||
serde = "1"
|
||||
serde_json = "1"
|
||||
smallvec = "1"
|
||||
smallvec = "*"
|
||||
|
||||
[dependencies.lldap_auth]
|
||||
version = "0.3"
|
||||
path = "../auth"
|
||||
features = ["opaque_client"]
|
||||
|
||||
[dependencies.graphql_client]
|
||||
@@ -27,11 +27,11 @@ default-features = false
|
||||
version = "0.11"
|
||||
|
||||
[dependencies.reqwest]
|
||||
version = "0.11"
|
||||
version = "*"
|
||||
default-features = false
|
||||
features = ["json", "blocking", "rustls-tls"]
|
||||
|
||||
[dependencies.ldap3]
|
||||
version = "0.11"
|
||||
version = "*"
|
||||
default-features = false
|
||||
features = ["sync", "tls-rustls"]
|
||||
|
||||
@@ -271,7 +271,7 @@ pub fn get_users(connection: &mut LdapClient) -> Result<Vec<User>, anyhow::Error
|
||||
.default(maybe_user_ou.unwrap_or_default())
|
||||
.auto_complete(|s, _| {
|
||||
let mut answers = autocomplete_domain_suffix(s, domain);
|
||||
answers.extend(all_ous.clone().into_iter());
|
||||
answers.extend(all_ous.clone());
|
||||
answers
|
||||
})
|
||||
.build();
|
||||
@@ -383,7 +383,7 @@ pub fn get_groups(connection: &mut LdapClient) -> Result<Vec<LdapGroup>> {
|
||||
.default(maybe_group_ou.unwrap_or_default())
|
||||
.auto_complete(|s, _| {
|
||||
let mut answers = autocomplete_domain_suffix(s, domain);
|
||||
answers.extend(all_ous.clone().into_iter());
|
||||
answers.extend(all_ous.clone());
|
||||
answers
|
||||
})
|
||||
.build();
|
||||
|
||||
28
schema.graphql
generated
28
schema.graphql
generated
@@ -1,3 +1,8 @@
|
||||
type AttributeValue {
|
||||
name: String!
|
||||
value: [String!]!
|
||||
}
|
||||
|
||||
input EqualityConstraint {
|
||||
field: String!
|
||||
value: String!
|
||||
@@ -19,6 +24,8 @@ type Group {
|
||||
displayName: String!
|
||||
creationDate: DateTimeUtc!
|
||||
uuid: String!
|
||||
"User-defined attributes."
|
||||
attributes: [AttributeValue!]!
|
||||
"The groups to which this user belongs."
|
||||
users: [User!]!
|
||||
}
|
||||
@@ -39,6 +46,11 @@ input RequestFilter {
|
||||
"DateTime"
|
||||
scalar DateTimeUtc
|
||||
|
||||
type Schema {
|
||||
userSchema: AttributeList!
|
||||
groupSchema: AttributeList!
|
||||
}
|
||||
|
||||
"The fields that can be updated for a group."
|
||||
input UpdateGroupInput {
|
||||
id: Int!
|
||||
@@ -51,6 +63,7 @@ type Query {
|
||||
users(filters: RequestFilter): [User!]!
|
||||
groups: [Group!]!
|
||||
group(groupId: Int!): Group!
|
||||
schema: Schema!
|
||||
}
|
||||
|
||||
"The details required to create a user."
|
||||
@@ -72,10 +85,25 @@ type User {
|
||||
avatar: String
|
||||
creationDate: DateTimeUtc!
|
||||
uuid: String!
|
||||
"User-defined attributes."
|
||||
attributes: [AttributeValue!]!
|
||||
"The groups to which this user belongs."
|
||||
groups: [Group!]!
|
||||
}
|
||||
|
||||
type AttributeList {
|
||||
attributes: [AttributeSchema!]!
|
||||
}
|
||||
|
||||
type AttributeSchema {
|
||||
name: String!
|
||||
attributeType: String!
|
||||
isList: Boolean!
|
||||
isVisible: Boolean!
|
||||
isEditable: Boolean!
|
||||
isHardcoded: Boolean!
|
||||
}
|
||||
|
||||
type Success {
|
||||
ok: Boolean!
|
||||
}
|
||||
|
||||
@@ -1,9 +1,15 @@
|
||||
#! /bin/bash
|
||||
|
||||
tables=("users" "groups" "memberships" "jwt_refresh_storage" "jwt_storage" "password_reset_tokens")
|
||||
tables=("users" "groups" "memberships" "jwt_refresh_storage" "jwt_storage" "password_reset_tokens" "group_attribute_schema" "group_attributes")
|
||||
echo ".header on"
|
||||
|
||||
for table in ${tables[@]}; do
|
||||
echo ".mode insert $table"
|
||||
echo "select * from $table;"
|
||||
done
|
||||
done
|
||||
|
||||
echo ".mode insert user_attribute_schema"
|
||||
echo "select * from user_attribute_schema where user_attribute_schema_name not in ('first_name', 'last_name', 'avatar');"
|
||||
|
||||
echo ".mode insert user_attributes"
|
||||
echo "select * from user_attributes;"
|
||||
|
||||
@@ -8,7 +8,7 @@ keywords = ["cli", "ldap", "graphql", "server", "authentication"]
|
||||
license = "GPL-3.0-only"
|
||||
name = "lldap"
|
||||
repository = "https://github.com/lldap/lldap"
|
||||
version = "0.5.0-alpha"
|
||||
version = "0.5.0"
|
||||
|
||||
[dependencies]
|
||||
actix = "0.13"
|
||||
@@ -19,45 +19,45 @@ actix-server = "2"
|
||||
actix-service = "2"
|
||||
actix-web = "4.3"
|
||||
actix-web-httpauth = "0.8"
|
||||
anyhow = "1"
|
||||
anyhow = "*"
|
||||
async-trait = "0.1"
|
||||
base64 = "0.21"
|
||||
bincode = "1.3"
|
||||
cron = "0.12"
|
||||
cron = "*"
|
||||
derive_builder = "0.12"
|
||||
figment_file_provider_adapter = "0.1"
|
||||
futures = "0.3"
|
||||
futures-util = "0.3"
|
||||
futures = "*"
|
||||
futures-util = "*"
|
||||
hmac = "0.12"
|
||||
http = "0.2"
|
||||
http = "*"
|
||||
itertools = "0.10"
|
||||
juniper = "0.15"
|
||||
jwt = "0.16"
|
||||
lber = "0.4.1"
|
||||
ldap3_proto = ">=0.3.1"
|
||||
log = "0.4"
|
||||
ldap3_proto = "^0.4"
|
||||
log = "*"
|
||||
orion = "0.17"
|
||||
rand_chacha = "0.3"
|
||||
rustls-pemfile = "1"
|
||||
serde = "1"
|
||||
serde = "*"
|
||||
serde_bytes = "0.11"
|
||||
serde_json = "1"
|
||||
sha2 = "0.10"
|
||||
thiserror = "1"
|
||||
thiserror = "*"
|
||||
time = "0.3"
|
||||
tokio-rustls = "0.23"
|
||||
tokio-stream = "0.1"
|
||||
tokio-stream = "*"
|
||||
tokio-util = "0.7"
|
||||
tracing = "0.1"
|
||||
tracing = "*"
|
||||
tracing-actix-web = "0.7"
|
||||
tracing-attributes = "^0.1.21"
|
||||
tracing-log = "0.1"
|
||||
tracing-log = "*"
|
||||
urlencoding = "2"
|
||||
webpki-roots = "0.23"
|
||||
webpki-roots = "*"
|
||||
|
||||
[dependencies.chrono]
|
||||
features = ["serde"]
|
||||
version = "0.4"
|
||||
version = "*"
|
||||
|
||||
[dependencies.clap]
|
||||
features = ["std", "color", "suggestions", "derive", "env"]
|
||||
@@ -65,7 +65,7 @@ version = "4"
|
||||
|
||||
[dependencies.figment]
|
||||
features = ["env", "toml"]
|
||||
version = "0.10"
|
||||
version = "*"
|
||||
|
||||
[dependencies.tracing-subscriber]
|
||||
version = "0.3"
|
||||
@@ -77,7 +77,7 @@ default-features = false
|
||||
version = "0.10.1"
|
||||
|
||||
[dependencies.lldap_auth]
|
||||
version = "0.3"
|
||||
path = "../auth"
|
||||
|
||||
[dependencies.opaque-ke]
|
||||
version = "0.6"
|
||||
@@ -88,19 +88,23 @@ version = "0.8"
|
||||
|
||||
[dependencies.secstr]
|
||||
features = ["serde"]
|
||||
version = "0.5"
|
||||
version = "*"
|
||||
|
||||
[dependencies.strum]
|
||||
features = ["derive"]
|
||||
version = "0.25"
|
||||
|
||||
[dependencies.tokio]
|
||||
features = ["full"]
|
||||
version = "1.25"
|
||||
|
||||
[dependencies.uuid]
|
||||
features = ["v3"]
|
||||
features = ["v1", "v3"]
|
||||
version = "1"
|
||||
|
||||
[dependencies.tracing-forest]
|
||||
features = ["smallvec", "chrono", "tokio"]
|
||||
version = "^0.1.4"
|
||||
version = "^0.1.6"
|
||||
|
||||
[dependencies.actix-tls]
|
||||
features = ["default", "rustls"]
|
||||
@@ -112,7 +116,7 @@ default-features = false
|
||||
version = "0.24"
|
||||
|
||||
[dependencies.sea-orm]
|
||||
version= "0.11"
|
||||
version= "0.12"
|
||||
default-features = false
|
||||
features = ["macros", "with-chrono", "with-uuid", "sqlx-all", "runtime-actix-rustls"]
|
||||
|
||||
@@ -125,10 +129,15 @@ features = ["rustls-tls-webpki-roots"]
|
||||
version = "0.20"
|
||||
features = ["dangerous_configuration"]
|
||||
|
||||
[dependencies.url]
|
||||
version = "2"
|
||||
features = ["serde"]
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = "2.0"
|
||||
mockall = "0.11"
|
||||
mockall = "0.11.4"
|
||||
nix = "0.26.2"
|
||||
pretty_assertions = "1"
|
||||
|
||||
[dev-dependencies.graphql_client]
|
||||
features = ["graphql_query_derive", "reqwest-rustls"]
|
||||
@@ -136,12 +145,12 @@ default-features = false
|
||||
version = "0.11"
|
||||
|
||||
[dev-dependencies.ldap3]
|
||||
version = "0.11"
|
||||
version = "*"
|
||||
default-features = false
|
||||
features = ["sync", "tls-rustls"]
|
||||
|
||||
[dev-dependencies.reqwest]
|
||||
version = "0.11"
|
||||
version = "*"
|
||||
default-features = false
|
||||
features = ["json", "blocking", "rustls-tls"]
|
||||
|
||||
|
||||
@@ -7,6 +7,8 @@ pub enum DomainError {
|
||||
AuthenticationError(String),
|
||||
#[error("Database error: `{0}`")]
|
||||
DatabaseError(#[from] sea_orm::DbErr),
|
||||
#[error("Database transaction error: `{0}`")]
|
||||
DatabaseTransactionError(#[from] sea_orm::TransactionError<sea_orm::DbErr>),
|
||||
#[error("Authentication protocol error for `{0}`")]
|
||||
AuthenticationProtocolError(#[from] lldap_auth::opaque::AuthenticationError),
|
||||
#[error("Unknown crypto error: `{0}`")]
|
||||
@@ -21,4 +23,13 @@ pub enum DomainError {
|
||||
InternalError(String),
|
||||
}
|
||||
|
||||
impl From<sea_orm::TransactionError<DomainError>> for DomainError {
|
||||
fn from(value: sea_orm::TransactionError<DomainError>) -> Self {
|
||||
match value {
|
||||
sea_orm::TransactionError::Connection(e) => e.into(),
|
||||
sea_orm::TransactionError::Transaction(e) => e,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, DomainError>;
|
||||
|
||||
@@ -1,7 +1,8 @@
|
||||
use super::{
|
||||
use crate::domain::{
|
||||
error::Result,
|
||||
types::{
|
||||
Group, GroupDetails, GroupId, JpegPhoto, User, UserAndGroups, UserColumn, UserId, Uuid,
|
||||
AttributeType, Group, GroupDetails, GroupId, JpegPhoto, User, UserAndGroups, UserColumn,
|
||||
UserId, Uuid,
|
||||
},
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
@@ -53,6 +54,7 @@ pub enum UserRequestFilter {
|
||||
UserId(UserId),
|
||||
UserIdSubString(SubStringFilter),
|
||||
Equality(UserColumn, String),
|
||||
AttributeEquality(String, String),
|
||||
SubString(UserColumn, SubStringFilter),
|
||||
// Check if a user belongs to a group identified by name.
|
||||
MemberOf(String),
|
||||
@@ -121,18 +123,49 @@ pub struct UpdateGroupRequest {
|
||||
pub display_name: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct AttributeSchema {
|
||||
pub name: String,
|
||||
//TODO: pub aliases: Vec<String>,
|
||||
pub attribute_type: AttributeType,
|
||||
pub is_list: bool,
|
||||
pub is_visible: bool,
|
||||
pub is_editable: bool,
|
||||
pub is_hardcoded: bool,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct AttributeList {
|
||||
pub attributes: Vec<AttributeSchema>,
|
||||
}
|
||||
|
||||
impl AttributeList {
|
||||
pub fn get_attribute_type(&self, name: &str) -> Option<(AttributeType, bool)> {
|
||||
self.attributes
|
||||
.iter()
|
||||
.find(|a| a.name == name)
|
||||
.map(|a| (a.attribute_type, a.is_list))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Schema {
|
||||
pub user_attributes: AttributeList,
|
||||
pub group_attributes: AttributeList,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait LoginHandler: Send + Sync {
|
||||
async fn bind(&self, request: BindRequest) -> Result<()>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait GroupListerBackendHandler {
|
||||
pub trait GroupListerBackendHandler: SchemaBackendHandler {
|
||||
async fn list_groups(&self, filters: Option<GroupRequestFilter>) -> Result<Vec<Group>>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait GroupBackendHandler {
|
||||
pub trait GroupBackendHandler: SchemaBackendHandler {
|
||||
async fn get_group_details(&self, group_id: GroupId) -> Result<GroupDetails>;
|
||||
async fn update_group(&self, request: UpdateGroupRequest) -> Result<()>;
|
||||
async fn create_group(&self, group_name: &str) -> Result<GroupId>;
|
||||
@@ -140,7 +173,7 @@ pub trait GroupBackendHandler {
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait UserListerBackendHandler {
|
||||
pub trait UserListerBackendHandler: SchemaBackendHandler {
|
||||
async fn list_users(
|
||||
&self,
|
||||
filters: Option<UserRequestFilter>,
|
||||
@@ -149,7 +182,7 @@ pub trait UserListerBackendHandler {
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait UserBackendHandler {
|
||||
pub trait UserBackendHandler: SchemaBackendHandler {
|
||||
async fn get_user_details(&self, user_id: &UserId) -> Result<User>;
|
||||
async fn create_user(&self, request: CreateUserRequest) -> Result<()>;
|
||||
async fn update_user(&self, request: UpdateUserRequest) -> Result<()>;
|
||||
@@ -159,6 +192,11 @@ pub trait UserBackendHandler {
|
||||
async fn get_user_groups(&self, user_id: &UserId) -> Result<HashSet<GroupDetails>>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait SchemaBackendHandler {
|
||||
async fn get_schema(&self) -> Result<Schema>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait BackendHandler:
|
||||
Send
|
||||
@@ -167,53 +205,16 @@ pub trait BackendHandler:
|
||||
+ UserBackendHandler
|
||||
+ UserListerBackendHandler
|
||||
+ GroupListerBackendHandler
|
||||
+ SchemaBackendHandler
|
||||
{
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mockall::mock! {
|
||||
pub TestBackendHandler{}
|
||||
impl Clone for TestBackendHandler {
|
||||
fn clone(&self) -> Self;
|
||||
}
|
||||
#[async_trait]
|
||||
impl GroupListerBackendHandler for TestBackendHandler {
|
||||
async fn list_groups(&self, filters: Option<GroupRequestFilter>) -> Result<Vec<Group>>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl GroupBackendHandler for TestBackendHandler {
|
||||
async fn get_group_details(&self, group_id: GroupId) -> Result<GroupDetails>;
|
||||
async fn update_group(&self, request: UpdateGroupRequest) -> Result<()>;
|
||||
async fn create_group(&self, group_name: &str) -> Result<GroupId>;
|
||||
async fn delete_group(&self, group_id: GroupId) -> Result<()>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl UserListerBackendHandler for TestBackendHandler {
|
||||
async fn list_users(&self, filters: Option<UserRequestFilter>, get_groups: bool) -> Result<Vec<UserAndGroups>>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl UserBackendHandler for TestBackendHandler {
|
||||
async fn get_user_details(&self, user_id: &UserId) -> Result<User>;
|
||||
async fn create_user(&self, request: CreateUserRequest) -> Result<()>;
|
||||
async fn update_user(&self, request: UpdateUserRequest) -> Result<()>;
|
||||
async fn delete_user(&self, user_id: &UserId) -> Result<()>;
|
||||
async fn get_user_groups(&self, user_id: &UserId) -> Result<HashSet<GroupDetails>>;
|
||||
async fn add_user_to_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()>;
|
||||
async fn remove_user_from_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl BackendHandler for TestBackendHandler {}
|
||||
#[async_trait]
|
||||
impl LoginHandler for TestBackendHandler {
|
||||
async fn bind(&self, request: BindRequest) -> Result<()>;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use base64::Engine;
|
||||
|
||||
use super::*;
|
||||
use base64::Engine;
|
||||
use pretty_assertions::assert_ne;
|
||||
|
||||
#[test]
|
||||
fn test_uuid_time() {
|
||||
use chrono::prelude::*;
|
||||
|
||||
@@ -6,7 +6,7 @@ use tracing::{debug, instrument, warn};
|
||||
use crate::domain::{
|
||||
handler::{GroupListerBackendHandler, GroupRequestFilter},
|
||||
ldap::error::LdapError,
|
||||
types::{Group, GroupColumn, UserId, Uuid},
|
||||
types::{Group, UserId, Uuid},
|
||||
};
|
||||
|
||||
use super::{
|
||||
@@ -140,10 +140,8 @@ fn convert_group_filter(
|
||||
GroupRequestFilter::from(false)
|
||||
})),
|
||||
_ => match map_group_field(field) {
|
||||
Some(GroupColumn::DisplayName) => {
|
||||
Ok(GroupRequestFilter::DisplayName(value.to_string()))
|
||||
}
|
||||
Some(GroupColumn::Uuid) => Ok(GroupRequestFilter::Uuid(
|
||||
Some("display_name") => Ok(GroupRequestFilter::DisplayName(value.to_string())),
|
||||
Some("uuid") => Ok(GroupRequestFilter::Uuid(
|
||||
Uuid::try_from(value.as_str()).map_err(|e| LdapError {
|
||||
code: LdapResultCode::InappropriateMatching,
|
||||
message: format!("Invalid UUID: {:#}", e),
|
||||
@@ -181,7 +179,7 @@ fn convert_group_filter(
|
||||
LdapFilter::Substring(field, substring_filter) => {
|
||||
let field = &field.to_ascii_lowercase();
|
||||
match map_group_field(field.as_str()) {
|
||||
Some(GroupColumn::DisplayName) => Ok(GroupRequestFilter::DisplayNameSubString(
|
||||
Some("display_name") => Ok(GroupRequestFilter::DisplayNameSubString(
|
||||
substring_filter.clone().into(),
|
||||
)),
|
||||
_ => Err(LdapError {
|
||||
@@ -200,14 +198,13 @@ fn convert_group_filter(
|
||||
}
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug")]
|
||||
#[instrument(skip_all, level = "debug", fields(ldap_filter))]
|
||||
pub async fn get_groups_list<Backend: GroupListerBackendHandler>(
|
||||
ldap_info: &LdapInfo,
|
||||
ldap_filter: &LdapFilter,
|
||||
base: &str,
|
||||
backend: &Backend,
|
||||
) -> LdapResult<Vec<Group>> {
|
||||
debug!(?ldap_filter);
|
||||
let filters = convert_group_filter(ldap_info, ldap_filter)?;
|
||||
debug!(?filters);
|
||||
backend
|
||||
|
||||
@@ -5,25 +5,24 @@ use ldap3_proto::{
|
||||
use tracing::{debug, instrument, warn};
|
||||
|
||||
use crate::domain::{
|
||||
handler::{UserListerBackendHandler, UserRequestFilter},
|
||||
handler::{Schema, UserListerBackendHandler, UserRequestFilter},
|
||||
ldap::{
|
||||
error::LdapError,
|
||||
utils::{expand_attribute_wildcards, get_user_id_from_distinguished_name},
|
||||
error::{LdapError, LdapResult},
|
||||
utils::{
|
||||
expand_attribute_wildcards, get_custom_attribute, get_group_id_from_distinguished_name,
|
||||
get_user_id_from_distinguished_name, map_user_field, LdapInfo, UserFieldType,
|
||||
},
|
||||
},
|
||||
types::{GroupDetails, User, UserAndGroups, UserColumn, UserId},
|
||||
};
|
||||
|
||||
use super::{
|
||||
error::LdapResult,
|
||||
utils::{get_group_id_from_distinguished_name, map_user_field, LdapInfo},
|
||||
};
|
||||
|
||||
pub fn get_user_attribute(
|
||||
user: &User,
|
||||
attribute: &str,
|
||||
base_dn_str: &str,
|
||||
groups: Option<&[GroupDetails]>,
|
||||
ignored_user_attributes: &[String],
|
||||
schema: &Schema,
|
||||
) -> Option<Vec<Vec<u8>>> {
|
||||
let attribute = attribute.to_ascii_lowercase();
|
||||
let attribute_values = match attribute.as_str() {
|
||||
@@ -38,9 +37,13 @@ pub fn get_user_attribute(
|
||||
"uid" | "user_id" | "id" => vec![user.user_id.to_string().into_bytes()],
|
||||
"entryuuid" | "uuid" => vec![user.uuid.to_string().into_bytes()],
|
||||
"mail" | "email" => vec![user.email.clone().into_bytes()],
|
||||
"givenname" | "first_name" | "firstname" => vec![user.first_name.clone()?.into_bytes()],
|
||||
"sn" | "last_name" | "lastname" => vec![user.last_name.clone()?.into_bytes()],
|
||||
"jpegphoto" | "avatar" => vec![user.avatar.clone()?.into_bytes()],
|
||||
"givenname" | "first_name" | "firstname" => {
|
||||
get_custom_attribute(&user.attributes, "first_name", schema)?
|
||||
}
|
||||
"sn" | "last_name" | "lastname" => {
|
||||
get_custom_attribute(&user.attributes, "last_name", schema)?
|
||||
}
|
||||
"jpegphoto" | "avatar" => get_custom_attribute(&user.attributes, "avatar", schema)?,
|
||||
"memberof" => groups
|
||||
.into_iter()
|
||||
.flatten()
|
||||
@@ -100,6 +103,7 @@ fn make_ldap_search_user_result_entry(
|
||||
attributes: &[String],
|
||||
groups: Option<&[GroupDetails]>,
|
||||
ignored_user_attributes: &[String],
|
||||
schema: &Schema,
|
||||
) -> LdapSearchResultEntry {
|
||||
let expanded_attributes = expand_user_attribute_wildcards(attributes);
|
||||
let dn = format!("uid={},ou=people,{}", user.user_id.as_str(), base_dn_str);
|
||||
@@ -108,8 +112,14 @@ fn make_ldap_search_user_result_entry(
|
||||
attributes: expanded_attributes
|
||||
.iter()
|
||||
.filter_map(|a| {
|
||||
let values =
|
||||
get_user_attribute(&user, a, base_dn_str, groups, ignored_user_attributes)?;
|
||||
let values = get_user_attribute(
|
||||
&user,
|
||||
a,
|
||||
base_dn_str,
|
||||
groups,
|
||||
ignored_user_attributes,
|
||||
schema,
|
||||
)?;
|
||||
Some(LdapPartialAttribute {
|
||||
atype: a.to_string(),
|
||||
vals: values,
|
||||
@@ -154,9 +164,17 @@ fn convert_user_filter(ldap_info: &LdapInfo, filter: &LdapFilter) -> LdapResult<
|
||||
UserRequestFilter::from(false)
|
||||
})),
|
||||
_ => match map_user_field(field) {
|
||||
Some(UserColumn::UserId) => Ok(UserRequestFilter::UserId(UserId::new(value))),
|
||||
Some(field) => Ok(UserRequestFilter::Equality(field, value.clone())),
|
||||
None => {
|
||||
UserFieldType::PrimaryField(UserColumn::UserId) => {
|
||||
Ok(UserRequestFilter::UserId(UserId::new(value)))
|
||||
}
|
||||
UserFieldType::PrimaryField(field) => {
|
||||
Ok(UserRequestFilter::Equality(field, value.clone()))
|
||||
}
|
||||
UserFieldType::Attribute(field) => Ok(UserRequestFilter::AttributeEquality(
|
||||
field.to_owned(),
|
||||
value.clone(),
|
||||
)),
|
||||
UserFieldType::NoMatch => {
|
||||
if !ldap_info.ignored_user_attributes.contains(field) {
|
||||
warn!(
|
||||
r#"Ignoring unknown user attribute "{}" in filter.\n\
|
||||
@@ -176,26 +194,26 @@ fn convert_user_filter(ldap_info: &LdapInfo, filter: &LdapFilter) -> LdapResult<
|
||||
field == "objectclass"
|
||||
|| field == "dn"
|
||||
|| field == "distinguishedname"
|
||||
|| map_user_field(field).is_some(),
|
||||
|| !matches!(map_user_field(field), UserFieldType::NoMatch),
|
||||
))
|
||||
}
|
||||
LdapFilter::Substring(field, substring_filter) => {
|
||||
let field = &field.to_ascii_lowercase();
|
||||
match map_user_field(field.as_str()) {
|
||||
Some(UserColumn::UserId) => Ok(UserRequestFilter::UserIdSubString(
|
||||
substring_filter.clone().into(),
|
||||
)),
|
||||
None
|
||||
| Some(UserColumn::CreationDate)
|
||||
| Some(UserColumn::Avatar)
|
||||
| Some(UserColumn::Uuid) => Err(LdapError {
|
||||
UserFieldType::PrimaryField(UserColumn::UserId) => Ok(
|
||||
UserRequestFilter::UserIdSubString(substring_filter.clone().into()),
|
||||
),
|
||||
UserFieldType::NoMatch
|
||||
| UserFieldType::Attribute(_)
|
||||
| UserFieldType::PrimaryField(UserColumn::CreationDate)
|
||||
| UserFieldType::PrimaryField(UserColumn::Uuid) => Err(LdapError {
|
||||
code: LdapResultCode::UnwillingToPerform,
|
||||
message: format!(
|
||||
"Unsupported user attribute for substring filter: {:?}",
|
||||
field
|
||||
),
|
||||
}),
|
||||
Some(field) => Ok(UserRequestFilter::SubString(
|
||||
UserFieldType::PrimaryField(field) => Ok(UserRequestFilter::SubString(
|
||||
field,
|
||||
substring_filter.clone().into(),
|
||||
)),
|
||||
@@ -212,7 +230,7 @@ fn expand_user_attribute_wildcards(attributes: &[String]) -> Vec<&str> {
|
||||
expand_attribute_wildcards(attributes, ALL_USER_ATTRIBUTE_KEYS)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug")]
|
||||
#[instrument(skip_all, level = "debug", fields(ldap_filter, request_groups))]
|
||||
pub async fn get_user_list<Backend: UserListerBackendHandler>(
|
||||
ldap_info: &LdapInfo,
|
||||
ldap_filter: &LdapFilter,
|
||||
@@ -220,7 +238,6 @@ pub async fn get_user_list<Backend: UserListerBackendHandler>(
|
||||
base: &str,
|
||||
backend: &Backend,
|
||||
) -> LdapResult<Vec<UserAndGroups>> {
|
||||
debug!(?ldap_filter);
|
||||
let filters = convert_user_filter(ldap_info, ldap_filter)?;
|
||||
debug!(?filters);
|
||||
backend
|
||||
@@ -236,6 +253,7 @@ pub fn convert_users_to_ldap_op<'a>(
|
||||
users: Vec<UserAndGroups>,
|
||||
attributes: &'a [String],
|
||||
ldap_info: &'a LdapInfo,
|
||||
schema: &'a Schema,
|
||||
) -> impl Iterator<Item = LdapOp> + 'a {
|
||||
users.into_iter().map(move |u| {
|
||||
LdapOp::SearchResultEntry(make_ldap_search_user_result_entry(
|
||||
@@ -244,6 +262,7 @@ pub fn convert_users_to_ldap_op<'a>(
|
||||
attributes,
|
||||
u.groups.as_deref(),
|
||||
&ldap_info.ignored_user_attributes,
|
||||
schema,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,11 +1,12 @@
|
||||
use chrono::{NaiveDateTime, TimeZone};
|
||||
use itertools::Itertools;
|
||||
use ldap3_proto::{proto::LdapSubstringFilter, LdapResultCode};
|
||||
use tracing::{debug, instrument, warn};
|
||||
|
||||
use crate::domain::{
|
||||
handler::SubStringFilter,
|
||||
handler::{Schema, SubStringFilter},
|
||||
ldap::error::{LdapError, LdapResult},
|
||||
types::{GroupColumn, UserColumn, UserId},
|
||||
types::{AttributeType, AttributeValue, JpegPhoto, UserColumn, UserId},
|
||||
};
|
||||
|
||||
impl From<LdapSubstringFilter> for SubStringFilter {
|
||||
@@ -105,7 +106,7 @@ pub fn get_group_id_from_distinguished_name(
|
||||
get_id_from_distinguished_name(dn, base_tree, base_dn_str, true)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug")]
|
||||
#[instrument(skip(all_attribute_keys), level = "debug")]
|
||||
pub fn expand_attribute_wildcards<'a>(
|
||||
ldap_attributes: &'a [String],
|
||||
all_attribute_keys: &'a [&'static str],
|
||||
@@ -127,7 +128,7 @@ pub fn expand_attribute_wildcards<'a>(
|
||||
.into_iter()
|
||||
.unique_by(|a| a.to_ascii_lowercase())
|
||||
.collect_vec();
|
||||
debug!(?ldap_attributes, ?resolved_attributes);
|
||||
debug!(?resolved_attributes);
|
||||
resolved_attributes
|
||||
}
|
||||
|
||||
@@ -152,31 +153,37 @@ pub fn is_subtree(subtree: &[(String, String)], base_tree: &[(String, String)])
|
||||
true
|
||||
}
|
||||
|
||||
pub fn map_user_field(field: &str) -> Option<UserColumn> {
|
||||
assert!(field == field.to_ascii_lowercase());
|
||||
Some(match field {
|
||||
"uid" | "user_id" | "id" => UserColumn::UserId,
|
||||
"mail" | "email" => UserColumn::Email,
|
||||
"cn" | "displayname" | "display_name" => UserColumn::DisplayName,
|
||||
"givenname" | "first_name" | "firstname" => UserColumn::FirstName,
|
||||
"sn" | "last_name" | "lastname" => UserColumn::LastName,
|
||||
"avatar" | "jpegphoto" => UserColumn::Avatar,
|
||||
"creationdate" | "createtimestamp" | "modifytimestamp" | "creation_date" => {
|
||||
UserColumn::CreationDate
|
||||
}
|
||||
"entryuuid" | "uuid" => UserColumn::Uuid,
|
||||
_ => return None,
|
||||
})
|
||||
pub enum UserFieldType {
|
||||
NoMatch,
|
||||
PrimaryField(UserColumn),
|
||||
Attribute(&'static str),
|
||||
}
|
||||
|
||||
pub fn map_group_field(field: &str) -> Option<GroupColumn> {
|
||||
pub fn map_user_field(field: &str) -> UserFieldType {
|
||||
assert!(field == field.to_ascii_lowercase());
|
||||
match field {
|
||||
"uid" | "user_id" | "id" => UserFieldType::PrimaryField(UserColumn::UserId),
|
||||
"mail" | "email" => UserFieldType::PrimaryField(UserColumn::Email),
|
||||
"cn" | "displayname" | "display_name" => {
|
||||
UserFieldType::PrimaryField(UserColumn::DisplayName)
|
||||
}
|
||||
"givenname" | "first_name" | "firstname" => UserFieldType::Attribute("first_name"),
|
||||
"sn" | "last_name" | "lastname" => UserFieldType::Attribute("last_name"),
|
||||
"avatar" | "jpegphoto" => UserFieldType::Attribute("avatar"),
|
||||
"creationdate" | "createtimestamp" | "modifytimestamp" | "creation_date" => {
|
||||
UserFieldType::PrimaryField(UserColumn::CreationDate)
|
||||
}
|
||||
"entryuuid" | "uuid" => UserFieldType::PrimaryField(UserColumn::Uuid),
|
||||
_ => UserFieldType::NoMatch,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn map_group_field(field: &str) -> Option<&'static str> {
|
||||
assert!(field == field.to_ascii_lowercase());
|
||||
Some(match field {
|
||||
"cn" | "displayname" | "uid" | "display_name" => GroupColumn::DisplayName,
|
||||
"creationdate" | "createtimestamp" | "modifytimestamp" | "creation_date" => {
|
||||
GroupColumn::CreationDate
|
||||
}
|
||||
"entryuuid" | "uuid" => GroupColumn::Uuid,
|
||||
"cn" | "displayname" | "uid" | "display_name" => "display_name",
|
||||
"creationdate" | "createtimestamp" | "modifytimestamp" | "creation_date" => "creation_date",
|
||||
"entryuuid" | "uuid" => "uuid",
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
@@ -187,3 +194,64 @@ pub struct LdapInfo {
|
||||
pub ignored_user_attributes: Vec<String>,
|
||||
pub ignored_group_attributes: Vec<String>,
|
||||
}
|
||||
|
||||
pub fn get_custom_attribute(
|
||||
attributes: &[AttributeValue],
|
||||
attribute_name: &str,
|
||||
schema: &Schema,
|
||||
) -> Option<Vec<Vec<u8>>> {
|
||||
let convert_date = |date| {
|
||||
chrono::Utc
|
||||
.from_utc_datetime(&date)
|
||||
.to_rfc3339()
|
||||
.into_bytes()
|
||||
};
|
||||
schema
|
||||
.user_attributes
|
||||
.get_attribute_type(attribute_name)
|
||||
.and_then(|attribute_type| {
|
||||
attributes
|
||||
.iter()
|
||||
.find(|a| a.name == attribute_name)
|
||||
.map(|attribute| match attribute_type {
|
||||
(AttributeType::String, false) => {
|
||||
vec![attribute.value.unwrap::<String>().into_bytes()]
|
||||
}
|
||||
(AttributeType::Integer, false) => {
|
||||
// LDAP integers are encoded as strings.
|
||||
vec![attribute.value.unwrap::<i64>().to_string().into_bytes()]
|
||||
}
|
||||
(AttributeType::JpegPhoto, false) => {
|
||||
vec![attribute.value.unwrap::<JpegPhoto>().into_bytes()]
|
||||
}
|
||||
(AttributeType::DateTime, false) => {
|
||||
vec![convert_date(attribute.value.unwrap::<NaiveDateTime>())]
|
||||
}
|
||||
(AttributeType::String, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<String>>()
|
||||
.into_iter()
|
||||
.map(String::into_bytes)
|
||||
.collect(),
|
||||
(AttributeType::Integer, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<i64>>()
|
||||
.into_iter()
|
||||
.map(|i| i.to_string())
|
||||
.map(String::into_bytes)
|
||||
.collect(),
|
||||
(AttributeType::JpegPhoto, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<JpegPhoto>>()
|
||||
.into_iter()
|
||||
.map(JpegPhoto::into_bytes)
|
||||
.collect(),
|
||||
(AttributeType::DateTime, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<NaiveDateTime>>()
|
||||
.into_iter()
|
||||
.map(convert_date)
|
||||
.collect(),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
@@ -7,6 +7,7 @@ pub mod sql_backend_handler;
|
||||
pub mod sql_group_backend_handler;
|
||||
pub mod sql_migrations;
|
||||
pub mod sql_opaque_handler;
|
||||
pub mod sql_schema_backend_handler;
|
||||
pub mod sql_tables;
|
||||
pub mod sql_user_backend_handler;
|
||||
pub mod types;
|
||||
|
||||
52
server/src/domain/model/group_attribute_schema.rs
Normal file
52
server/src/domain/model/group_attribute_schema.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::{handler::AttributeSchema, types::AttributeType};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "group_attribute_schema")]
|
||||
pub struct Model {
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "group_attribute_schema_name"
|
||||
)]
|
||||
pub attribute_name: String,
|
||||
#[sea_orm(column_name = "group_attribute_schema_type")]
|
||||
pub attribute_type: AttributeType,
|
||||
#[sea_orm(column_name = "group_attribute_schema_is_list")]
|
||||
pub is_list: bool,
|
||||
#[sea_orm(column_name = "group_attribute_schema_is_group_visible")]
|
||||
pub is_group_visible: bool,
|
||||
#[sea_orm(column_name = "group_attribute_schema_is_group_editable")]
|
||||
pub is_group_editable: bool,
|
||||
#[sea_orm(column_name = "group_attribute_schema_is_hardcoded")]
|
||||
pub is_hardcoded: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_many = "super::group_attributes::Entity")]
|
||||
GroupAttributes,
|
||||
}
|
||||
|
||||
impl Related<super::GroupAttributes> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::GroupAttributes.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
impl From<Model> for AttributeSchema {
|
||||
fn from(value: Model) -> Self {
|
||||
Self {
|
||||
name: value.attribute_name,
|
||||
attribute_type: value.attribute_type,
|
||||
is_list: value.is_list,
|
||||
is_visible: value.is_group_visible,
|
||||
is_editable: value.is_group_editable,
|
||||
is_hardcoded: value.is_hardcoded,
|
||||
}
|
||||
}
|
||||
}
|
||||
72
server/src/domain/model/group_attributes.rs
Normal file
72
server/src/domain/model/group_attributes.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::types::{AttributeValue, GroupId, Serialized};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "group_attributes")]
|
||||
pub struct Model {
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "group_attribute_group_id"
|
||||
)]
|
||||
pub group_id: GroupId,
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "group_attribute_name"
|
||||
)]
|
||||
pub attribute_name: String,
|
||||
#[sea_orm(column_name = "group_attribute_value")]
|
||||
pub value: Serialized,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::groups::Entity",
|
||||
from = "Column::GroupId",
|
||||
to = "super::groups::Column::GroupId",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
Groups,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::group_attribute_schema::Entity",
|
||||
from = "Column::AttributeName",
|
||||
to = "super::group_attribute_schema::Column::AttributeName",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
GroupAttributeSchema,
|
||||
}
|
||||
|
||||
impl Related<super::Group> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Groups.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::GroupAttributeSchema> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::GroupAttributeSchema.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
impl From<Model> for AttributeValue {
|
||||
fn from(
|
||||
Model {
|
||||
group_id: _,
|
||||
attribute_name,
|
||||
value,
|
||||
}: Model,
|
||||
) -> Self {
|
||||
Self {
|
||||
name: attribute_name,
|
||||
value,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -37,6 +37,7 @@ impl From<Model> for crate::domain::types::Group {
|
||||
creation_date: group.creation_date,
|
||||
uuid: group.uuid,
|
||||
users: vec![],
|
||||
attributes: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -48,6 +49,7 @@ impl From<Model> for crate::domain::types::GroupDetails {
|
||||
display_name: group.display_name,
|
||||
creation_date: group.creation_date,
|
||||
uuid: group.uuid,
|
||||
attributes: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,4 +9,10 @@ pub mod memberships;
|
||||
pub mod password_reset_tokens;
|
||||
pub mod users;
|
||||
|
||||
pub mod user_attribute_schema;
|
||||
pub mod user_attributes;
|
||||
|
||||
pub mod group_attribute_schema;
|
||||
pub mod group_attributes;
|
||||
|
||||
pub use prelude::*;
|
||||
|
||||
@@ -1,5 +1,9 @@
|
||||
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.10.3
|
||||
|
||||
pub use super::group_attribute_schema::Column as GroupAttributeSchemaColumn;
|
||||
pub use super::group_attribute_schema::Entity as GroupAttributeSchema;
|
||||
pub use super::group_attributes::Column as GroupAttributesColumn;
|
||||
pub use super::group_attributes::Entity as GroupAttributes;
|
||||
pub use super::groups::Column as GroupColumn;
|
||||
pub use super::groups::Entity as Group;
|
||||
pub use super::jwt_refresh_storage::Column as JwtRefreshStorageColumn;
|
||||
@@ -10,5 +14,9 @@ pub use super::memberships::Column as MembershipColumn;
|
||||
pub use super::memberships::Entity as Membership;
|
||||
pub use super::password_reset_tokens::Column as PasswordResetTokensColumn;
|
||||
pub use super::password_reset_tokens::Entity as PasswordResetTokens;
|
||||
pub use super::user_attribute_schema::Column as UserAttributeSchemaColumn;
|
||||
pub use super::user_attribute_schema::Entity as UserAttributeSchema;
|
||||
pub use super::user_attributes::Column as UserAttributesColumn;
|
||||
pub use super::user_attributes::Entity as UserAttributes;
|
||||
pub use super::users::Column as UserColumn;
|
||||
pub use super::users::Entity as User;
|
||||
|
||||
52
server/src/domain/model/user_attribute_schema.rs
Normal file
52
server/src/domain/model/user_attribute_schema.rs
Normal file
@@ -0,0 +1,52 @@
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::{handler::AttributeSchema, types::AttributeType};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "user_attribute_schema")]
|
||||
pub struct Model {
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "user_attribute_schema_name"
|
||||
)]
|
||||
pub attribute_name: String,
|
||||
#[sea_orm(column_name = "user_attribute_schema_type")]
|
||||
pub attribute_type: AttributeType,
|
||||
#[sea_orm(column_name = "user_attribute_schema_is_list")]
|
||||
pub is_list: bool,
|
||||
#[sea_orm(column_name = "user_attribute_schema_is_user_visible")]
|
||||
pub is_user_visible: bool,
|
||||
#[sea_orm(column_name = "user_attribute_schema_is_user_editable")]
|
||||
pub is_user_editable: bool,
|
||||
#[sea_orm(column_name = "user_attribute_schema_is_hardcoded")]
|
||||
pub is_hardcoded: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_many = "super::user_attributes::Entity")]
|
||||
UserAttributes,
|
||||
}
|
||||
|
||||
impl Related<super::UserAttributes> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::UserAttributes.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
impl From<Model> for AttributeSchema {
|
||||
fn from(value: Model) -> Self {
|
||||
Self {
|
||||
name: value.attribute_name,
|
||||
attribute_type: value.attribute_type,
|
||||
is_list: value.is_list,
|
||||
is_visible: value.is_user_visible,
|
||||
is_editable: value.is_user_editable,
|
||||
is_hardcoded: value.is_hardcoded,
|
||||
}
|
||||
}
|
||||
}
|
||||
72
server/src/domain/model/user_attributes.rs
Normal file
72
server/src/domain/model/user_attributes.rs
Normal file
@@ -0,0 +1,72 @@
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::types::{AttributeValue, Serialized, UserId};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "user_attributes")]
|
||||
pub struct Model {
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "user_attribute_user_id"
|
||||
)]
|
||||
pub user_id: UserId,
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "user_attribute_name"
|
||||
)]
|
||||
pub attribute_name: String,
|
||||
#[sea_orm(column_name = "user_attribute_value")]
|
||||
pub value: Serialized,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::users::Entity",
|
||||
from = "Column::UserId",
|
||||
to = "super::users::Column::UserId",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
Users,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::user_attribute_schema::Entity",
|
||||
from = "Column::AttributeName",
|
||||
to = "super::user_attribute_schema::Column::AttributeName",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
UserAttributeSchema,
|
||||
}
|
||||
|
||||
impl Related<super::User> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Users.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::UserAttributeSchema> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::UserAttributeSchema.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
impl From<Model> for AttributeValue {
|
||||
fn from(
|
||||
Model {
|
||||
user_id: _,
|
||||
attribute_name,
|
||||
value,
|
||||
}: Model,
|
||||
) -> Self {
|
||||
Self {
|
||||
name: attribute_name,
|
||||
value,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@
|
||||
use sea_orm::{entity::prelude::*, sea_query::BlobSize};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::types::{JpegPhoto, UserId, Uuid};
|
||||
use crate::domain::types::{UserId, Uuid};
|
||||
|
||||
#[derive(Copy, Clone, Default, Debug, DeriveEntity)]
|
||||
pub struct Entity;
|
||||
@@ -15,9 +15,6 @@ pub struct Model {
|
||||
pub user_id: UserId,
|
||||
pub email: String,
|
||||
pub display_name: Option<String>,
|
||||
pub first_name: Option<String>,
|
||||
pub last_name: Option<String>,
|
||||
pub avatar: Option<JpegPhoto>,
|
||||
pub creation_date: chrono::NaiveDateTime,
|
||||
pub password_hash: Option<Vec<u8>>,
|
||||
pub totp_secret: Option<String>,
|
||||
@@ -36,9 +33,6 @@ pub enum Column {
|
||||
UserId,
|
||||
Email,
|
||||
DisplayName,
|
||||
FirstName,
|
||||
LastName,
|
||||
Avatar,
|
||||
CreationDate,
|
||||
PasswordHash,
|
||||
TotpSecret,
|
||||
@@ -54,9 +48,6 @@ impl ColumnTrait for Column {
|
||||
Column::UserId => ColumnType::String(Some(255)),
|
||||
Column::Email => ColumnType::String(Some(255)),
|
||||
Column::DisplayName => ColumnType::String(Some(255)),
|
||||
Column::FirstName => ColumnType::String(Some(255)),
|
||||
Column::LastName => ColumnType::String(Some(255)),
|
||||
Column::Avatar => ColumnType::Binary(BlobSize::Long),
|
||||
Column::CreationDate => ColumnType::DateTime,
|
||||
Column::PasswordHash => ColumnType::Binary(BlobSize::Medium),
|
||||
Column::TotpSecret => ColumnType::String(Some(64)),
|
||||
@@ -124,11 +115,9 @@ impl From<Model> for crate::domain::types::User {
|
||||
user_id: user.user_id,
|
||||
email: user.email,
|
||||
display_name: user.display_name,
|
||||
first_name: user.first_name,
|
||||
last_name: user.last_name,
|
||||
creation_date: user.creation_date,
|
||||
uuid: user.uuid,
|
||||
avatar: user.avatar,
|
||||
attributes: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -32,6 +32,7 @@ pub mod tests {
|
||||
infra::configuration::ConfigurationBuilder,
|
||||
};
|
||||
use lldap_auth::{opaque, registration};
|
||||
use pretty_assertions::assert_eq;
|
||||
use sea_orm::Database;
|
||||
|
||||
pub fn get_default_config() -> Configuration {
|
||||
@@ -59,7 +60,7 @@ pub mod tests {
|
||||
insert_user_no_password(handler, name).await;
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let client_registration_start =
|
||||
opaque::client::registration::start_registration(pass, &mut rng).unwrap();
|
||||
opaque::client::registration::start_registration(pass.as_bytes(), &mut rng).unwrap();
|
||||
let response = handler
|
||||
.registration_start(registration::ClientRegistrationStartRequest {
|
||||
username: name.to_string(),
|
||||
|
||||
@@ -5,7 +5,7 @@ use crate::domain::{
|
||||
},
|
||||
model::{self, GroupColumn, MembershipColumn},
|
||||
sql_backend_handler::SqlBackendHandler,
|
||||
types::{Group, GroupDetails, GroupId, Uuid},
|
||||
types::{AttributeValue, Group, GroupDetails, GroupId, Uuid},
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use sea_orm::{
|
||||
@@ -13,7 +13,7 @@ use sea_orm::{
|
||||
ActiveModelTrait, ActiveValue, ColumnTrait, EntityTrait, QueryFilter, QueryOrder, QuerySelect,
|
||||
QueryTrait,
|
||||
};
|
||||
use tracing::{debug, instrument};
|
||||
use tracing::instrument;
|
||||
|
||||
fn get_group_filter_expr(filter: GroupRequestFilter) -> Cond {
|
||||
use GroupRequestFilter::*;
|
||||
@@ -60,12 +60,10 @@ fn get_group_filter_expr(filter: GroupRequestFilter) -> Cond {
|
||||
|
||||
#[async_trait]
|
||||
impl GroupListerBackendHandler for SqlBackendHandler {
|
||||
#[instrument(skip_all, level = "debug", ret, err)]
|
||||
#[instrument(skip(self), level = "debug", ret, err)]
|
||||
async fn list_groups(&self, filters: Option<GroupRequestFilter>) -> Result<Vec<Group>> {
|
||||
debug!(?filters);
|
||||
let results = model::Group::find()
|
||||
// The order_by must be before find_with_related otherwise the primary order is by group_id.
|
||||
.order_by_asc(GroupColumn::DisplayName)
|
||||
.order_by_asc(GroupColumn::GroupId)
|
||||
.find_with_related(model::Membership)
|
||||
.filter(
|
||||
filters
|
||||
@@ -85,7 +83,7 @@ impl GroupListerBackendHandler for SqlBackendHandler {
|
||||
)
|
||||
.all(&self.sql_pool)
|
||||
.await?;
|
||||
Ok(results
|
||||
let mut groups: Vec<_> = results
|
||||
.into_iter()
|
||||
.map(|(group, users)| {
|
||||
let users: Vec<_> = users.into_iter().map(|u| u.user_id).collect();
|
||||
@@ -94,25 +92,53 @@ impl GroupListerBackendHandler for SqlBackendHandler {
|
||||
..group.into()
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
.collect();
|
||||
let group_ids = groups.iter().map(|u| &u.id);
|
||||
let attributes = model::GroupAttributes::find()
|
||||
.filter(model::GroupAttributesColumn::GroupId.is_in(group_ids))
|
||||
.order_by_asc(model::GroupAttributesColumn::GroupId)
|
||||
.order_by_asc(model::GroupAttributesColumn::AttributeName)
|
||||
.all(&self.sql_pool)
|
||||
.await?;
|
||||
let mut attributes_iter = attributes.into_iter().peekable();
|
||||
use itertools::Itertools; // For take_while_ref
|
||||
for group in groups.iter_mut() {
|
||||
assert!(attributes_iter
|
||||
.peek()
|
||||
.map(|u| u.group_id >= group.id)
|
||||
.unwrap_or(true),
|
||||
"Attributes are not sorted, groups are not sorted, or previous group didn't consume all the attributes");
|
||||
|
||||
group.attributes = attributes_iter
|
||||
.take_while_ref(|u| u.group_id == group.id)
|
||||
.map(AttributeValue::from)
|
||||
.collect();
|
||||
}
|
||||
groups.sort_by(|g1, g2| g1.display_name.cmp(&g2.display_name));
|
||||
Ok(groups)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl GroupBackendHandler for SqlBackendHandler {
|
||||
#[instrument(skip_all, level = "debug", ret, err)]
|
||||
#[instrument(skip(self), level = "debug", ret, err)]
|
||||
async fn get_group_details(&self, group_id: GroupId) -> Result<GroupDetails> {
|
||||
debug!(?group_id);
|
||||
model::Group::find_by_id(group_id)
|
||||
.into_model::<GroupDetails>()
|
||||
let mut group_details = model::Group::find_by_id(group_id)
|
||||
.one(&self.sql_pool)
|
||||
.await?
|
||||
.ok_or_else(|| DomainError::EntityNotFound(format!("{:?}", group_id)))
|
||||
.map(Into::<GroupDetails>::into)
|
||||
.ok_or_else(|| DomainError::EntityNotFound(format!("{:?}", group_id)))?;
|
||||
let attributes = model::GroupAttributes::find()
|
||||
.filter(model::GroupAttributesColumn::GroupId.eq(group_details.group_id))
|
||||
.order_by_asc(model::GroupAttributesColumn::AttributeName)
|
||||
.all(&self.sql_pool)
|
||||
.await?;
|
||||
group_details.attributes = attributes.into_iter().map(AttributeValue::from).collect();
|
||||
Ok(group_details)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
#[instrument(skip(self), level = "debug", err, fields(group_id = ?request.group_id))]
|
||||
async fn update_group(&self, request: UpdateGroupRequest) -> Result<()> {
|
||||
debug!(?request.group_id);
|
||||
let update_group = model::groups::ActiveModel {
|
||||
group_id: ActiveValue::Set(request.group_id),
|
||||
display_name: request
|
||||
@@ -125,9 +151,8 @@ impl GroupBackendHandler for SqlBackendHandler {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", ret, err)]
|
||||
#[instrument(skip(self), level = "debug", ret, err)]
|
||||
async fn create_group(&self, group_name: &str) -> Result<GroupId> {
|
||||
debug!(?group_name);
|
||||
let now = chrono::Utc::now().naive_utc();
|
||||
let uuid = Uuid::from_name_and_date(group_name, &now);
|
||||
let new_group = model::groups::ActiveModel {
|
||||
@@ -139,9 +164,8 @@ impl GroupBackendHandler for SqlBackendHandler {
|
||||
Ok(new_group.insert(&self.sql_pool).await?.group_id)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
#[instrument(skip(self), level = "debug", err)]
|
||||
async fn delete_group(&self, group_id: GroupId) -> Result<()> {
|
||||
debug!(?group_id);
|
||||
let res = model::Group::delete_by_id(group_id)
|
||||
.exec(&self.sql_pool)
|
||||
.await?;
|
||||
@@ -159,6 +183,7 @@ impl GroupBackendHandler for SqlBackendHandler {
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::domain::{handler::SubStringFilter, sql_backend_handler::tests::*, types::UserId};
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
async fn get_group_ids(
|
||||
handler: &SqlBackendHandler,
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
use crate::domain::{
|
||||
sql_tables::{DbConnection, SchemaVersion},
|
||||
types::{GroupId, UserId, Uuid},
|
||||
sql_tables::{DbConnection, SchemaVersion, LAST_SCHEMA_VERSION},
|
||||
types::{AttributeType, GroupId, JpegPhoto, Serialized, UserId, Uuid},
|
||||
};
|
||||
use anyhow::Context;
|
||||
use itertools::Itertools;
|
||||
use sea_orm::{
|
||||
sea_query::{
|
||||
self, all, ColumnDef, Expr, ForeignKey, ForeignKeyAction, Func, Index, Query, Table, Value,
|
||||
},
|
||||
ConnectionTrait, FromQueryResult, Iden, Order, Statement, TransactionTrait,
|
||||
ConnectionTrait, DatabaseTransaction, DbErr, DeriveIden, FromQueryResult, Iden, Order,
|
||||
Statement, TransactionTrait,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::{info, instrument, warn};
|
||||
use tracing::{error, info, instrument, warn};
|
||||
|
||||
#[derive(Iden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum Users {
|
||||
Table,
|
||||
UserId,
|
||||
@@ -29,7 +29,7 @@ pub enum Users {
|
||||
Uuid,
|
||||
}
|
||||
|
||||
#[derive(Iden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum Groups {
|
||||
Table,
|
||||
GroupId,
|
||||
@@ -38,15 +38,53 @@ pub enum Groups {
|
||||
Uuid,
|
||||
}
|
||||
|
||||
#[derive(Iden, Clone, Copy)]
|
||||
#[derive(DeriveIden, Clone, Copy)]
|
||||
pub enum Memberships {
|
||||
Table,
|
||||
UserId,
|
||||
GroupId,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum UserAttributeSchema {
|
||||
Table,
|
||||
UserAttributeSchemaName,
|
||||
UserAttributeSchemaType,
|
||||
UserAttributeSchemaIsList,
|
||||
UserAttributeSchemaIsUserVisible,
|
||||
UserAttributeSchemaIsUserEditable,
|
||||
UserAttributeSchemaIsHardcoded,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum UserAttributes {
|
||||
Table,
|
||||
UserAttributeUserId,
|
||||
UserAttributeName,
|
||||
UserAttributeValue,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum GroupAttributeSchema {
|
||||
Table,
|
||||
GroupAttributeSchemaName,
|
||||
GroupAttributeSchemaType,
|
||||
GroupAttributeSchemaIsList,
|
||||
GroupAttributeSchemaIsGroupVisible,
|
||||
GroupAttributeSchemaIsGroupEditable,
|
||||
GroupAttributeSchemaIsHardcoded,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum GroupAttributes {
|
||||
Table,
|
||||
GroupAttributeGroupId,
|
||||
GroupAttributeName,
|
||||
GroupAttributeValue,
|
||||
}
|
||||
|
||||
// Metadata about the SQL DB.
|
||||
#[derive(Iden)]
|
||||
#[derive(DeriveIden)]
|
||||
pub enum Metadata {
|
||||
Table,
|
||||
// Which version of the schema we're at.
|
||||
@@ -337,72 +375,64 @@ pub async fn upgrade_to_v1(pool: &DbConnection) -> std::result::Result<(), sea_o
|
||||
}
|
||||
|
||||
async fn replace_column<I: Iden + Copy + 'static, const N: usize>(
|
||||
pool: &DbConnection,
|
||||
transaction: DatabaseTransaction,
|
||||
table_name: I,
|
||||
column_name: I,
|
||||
mut new_column: ColumnDef,
|
||||
update_values: [Statement; N],
|
||||
) -> anyhow::Result<()> {
|
||||
) -> Result<DatabaseTransaction, DbErr> {
|
||||
// Update the definition of a column (in a compatible way). Due to Sqlite, this is more complicated:
|
||||
// - rename the column to a temporary name
|
||||
// - create the column with the new definition
|
||||
// - copy the data from the temp column to the new one
|
||||
// - update the new one if there are changes needed
|
||||
// - drop the old one
|
||||
let builder = pool.get_database_backend();
|
||||
pool.transaction::<_, (), sea_orm::DbErr>(move |transaction| {
|
||||
Box::pin(async move {
|
||||
#[derive(Iden)]
|
||||
enum TempTable {
|
||||
TempName,
|
||||
}
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter()
|
||||
.table(table_name)
|
||||
.rename_column(column_name, TempTable::TempName),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(Table::alter().table(table_name).add_column(&mut new_column)),
|
||||
)
|
||||
.await?;
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Query::update()
|
||||
.table(table_name)
|
||||
.value(column_name, Expr::col((table_name, TempTable::TempName))),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
for statement in update_values {
|
||||
transaction.execute(statement).await?;
|
||||
}
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter()
|
||||
.table(table_name)
|
||||
.drop_column(TempTable::TempName),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
let builder = transaction.get_database_backend();
|
||||
#[derive(DeriveIden)]
|
||||
enum TempTable {
|
||||
TempName,
|
||||
}
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter()
|
||||
.table(table_name)
|
||||
.rename_column(column_name, TempTable::TempName),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
transaction
|
||||
.execute(builder.build(Table::alter().table(table_name).add_column(&mut new_column)))
|
||||
.await?;
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Query::update()
|
||||
.table(table_name)
|
||||
.value(column_name, Expr::col((table_name, TempTable::TempName))),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
for statement in update_values {
|
||||
transaction.execute(statement).await?;
|
||||
}
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter()
|
||||
.table(table_name)
|
||||
.drop_column(TempTable::TempName),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
async fn migrate_to_v2(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
let builder = pool.get_database_backend();
|
||||
async fn migrate_to_v2(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
// Allow nulls in DisplayName, and change empty string to null.
|
||||
replace_column(
|
||||
pool,
|
||||
let transaction = replace_column(
|
||||
transaction,
|
||||
Users::Table,
|
||||
Users::DisplayName,
|
||||
ColumnDef::new(Users::DisplayName)
|
||||
@@ -416,14 +446,14 @@ async fn migrate_to_v2(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
)],
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
async fn migrate_to_v3(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
let builder = pool.get_database_backend();
|
||||
async fn migrate_to_v3(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
// Allow nulls in First and LastName. Users who created their DB in 0.4.1 have the not null constraint.
|
||||
replace_column(
|
||||
pool,
|
||||
let transaction = replace_column(
|
||||
transaction,
|
||||
Users::Table,
|
||||
Users::FirstName,
|
||||
ColumnDef::new(Users::FirstName).string_len(255).to_owned(),
|
||||
@@ -435,8 +465,8 @@ async fn migrate_to_v3(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
)],
|
||||
)
|
||||
.await?;
|
||||
replace_column(
|
||||
pool,
|
||||
let transaction = replace_column(
|
||||
transaction,
|
||||
Users::Table,
|
||||
Users::LastName,
|
||||
ColumnDef::new(Users::LastName).string_len(255).to_owned(),
|
||||
@@ -449,8 +479,8 @@ async fn migrate_to_v3(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
)
|
||||
.await?;
|
||||
// Change Avatar from binary to blob(long), because for MySQL this is 64kb.
|
||||
replace_column(
|
||||
pool,
|
||||
let transaction = replace_column(
|
||||
transaction,
|
||||
Users::Table,
|
||||
Users::Avatar,
|
||||
ColumnDef::new(Users::Avatar)
|
||||
@@ -459,13 +489,13 @@ async fn migrate_to_v3(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
[],
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
async fn migrate_to_v4(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
let builder = pool.get_database_backend();
|
||||
async fn migrate_to_v4(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
// Make emails and UUIDs unique.
|
||||
if let Err(e) = pool
|
||||
if let Err(e) = transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Index::create()
|
||||
@@ -477,16 +507,16 @@ async fn migrate_to_v4(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
),
|
||||
)
|
||||
.await
|
||||
.context(
|
||||
r#"while enforcing unicity on emails (2 users have the same email).
|
||||
{
|
||||
error!(
|
||||
r#"Found several users with the same email.
|
||||
|
||||
See https://github.com/lldap/lldap/blob/main/docs/migration_guides/v0.5.md for details.
|
||||
|
||||
Conflicting emails:
|
||||
"#,
|
||||
)
|
||||
{
|
||||
warn!("Found several users with the same email:");
|
||||
for (email, users) in &pool
|
||||
);
|
||||
for (email, users) in &transaction
|
||||
.query_all(
|
||||
builder.build(
|
||||
Query::select()
|
||||
@@ -528,39 +558,329 @@ See https://github.com/lldap/lldap/blob/main/docs/migration_guides/v0.5.md for d
|
||||
}
|
||||
return Err(e);
|
||||
}
|
||||
pool.execute(
|
||||
builder.build(
|
||||
Index::create()
|
||||
.if_not_exists()
|
||||
.name("unique-user-uuid")
|
||||
.table(Users::Table)
|
||||
.col(Users::Uuid)
|
||||
.unique(),
|
||||
),
|
||||
)
|
||||
.await
|
||||
.context("while enforcing unicity on user UUIDs (2 users have the same UUID)")?;
|
||||
pool.execute(
|
||||
builder.build(
|
||||
Index::create()
|
||||
.if_not_exists()
|
||||
.name("unique-group-uuid")
|
||||
.table(Groups::Table)
|
||||
.col(Groups::Uuid)
|
||||
.unique(),
|
||||
),
|
||||
)
|
||||
.await
|
||||
.context("while enforcing unicity on group UUIDs (2 groups have the same UUID)")?;
|
||||
Ok(())
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Index::create()
|
||||
.if_not_exists()
|
||||
.name("unique-user-uuid")
|
||||
.table(Users::Table)
|
||||
.col(Users::Uuid)
|
||||
.unique(),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Index::create()
|
||||
.if_not_exists()
|
||||
.name("unique-group-uuid")
|
||||
.table(Groups::Table)
|
||||
.col(Groups::Uuid)
|
||||
.unique(),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
async fn migrate_to_v5(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::create()
|
||||
.table(UserAttributeSchema::Table)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaName)
|
||||
.string_len(64)
|
||||
.not_null()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaType)
|
||||
.string_len(64)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaIsList)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaIsUserVisible)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaIsUserEditable)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaIsHardcoded)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::create()
|
||||
.table(GroupAttributeSchema::Table)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaName)
|
||||
.string_len(64)
|
||||
.not_null()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaType)
|
||||
.string_len(64)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaIsList)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaIsGroupVisible)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaIsGroupEditable)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaIsHardcoded)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::create()
|
||||
.table(UserAttributes::Table)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributes::UserAttributeUserId)
|
||||
.string_len(255)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributes::UserAttributeName)
|
||||
.string_len(64)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributes::UserAttributeValue)
|
||||
.blob(sea_query::BlobSize::Long)
|
||||
.not_null(),
|
||||
)
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("UserAttributeUserIdForeignKey")
|
||||
.from(UserAttributes::Table, UserAttributes::UserAttributeUserId)
|
||||
.to(Users::Table, Users::UserId)
|
||||
.on_delete(ForeignKeyAction::Cascade)
|
||||
.on_update(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("UserAttributeNameForeignKey")
|
||||
.from(UserAttributes::Table, UserAttributes::UserAttributeName)
|
||||
.to(
|
||||
UserAttributeSchema::Table,
|
||||
UserAttributeSchema::UserAttributeSchemaName,
|
||||
)
|
||||
.on_delete(ForeignKeyAction::Cascade)
|
||||
.on_update(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.primary_key(
|
||||
Index::create()
|
||||
.col(UserAttributes::UserAttributeUserId)
|
||||
.col(UserAttributes::UserAttributeName),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::create()
|
||||
.table(GroupAttributes::Table)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributes::GroupAttributeGroupId)
|
||||
.integer()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributes::GroupAttributeName)
|
||||
.string_len(64)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributes::GroupAttributeValue)
|
||||
.blob(sea_query::BlobSize::Long)
|
||||
.not_null(),
|
||||
)
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("GroupAttributeGroupIdForeignKey")
|
||||
.from(
|
||||
GroupAttributes::Table,
|
||||
GroupAttributes::GroupAttributeGroupId,
|
||||
)
|
||||
.to(Groups::Table, Groups::GroupId)
|
||||
.on_delete(ForeignKeyAction::Cascade)
|
||||
.on_update(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("GroupAttributeNameForeignKey")
|
||||
.from(GroupAttributes::Table, GroupAttributes::GroupAttributeName)
|
||||
.to(
|
||||
GroupAttributeSchema::Table,
|
||||
GroupAttributeSchema::GroupAttributeSchemaName,
|
||||
)
|
||||
.on_delete(ForeignKeyAction::Cascade)
|
||||
.on_update(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.primary_key(
|
||||
Index::create()
|
||||
.col(GroupAttributes::GroupAttributeGroupId)
|
||||
.col(GroupAttributes::GroupAttributeName),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Query::insert()
|
||||
.into_table(UserAttributeSchema::Table)
|
||||
.columns([
|
||||
UserAttributeSchema::UserAttributeSchemaName,
|
||||
UserAttributeSchema::UserAttributeSchemaType,
|
||||
UserAttributeSchema::UserAttributeSchemaIsList,
|
||||
UserAttributeSchema::UserAttributeSchemaIsUserVisible,
|
||||
UserAttributeSchema::UserAttributeSchemaIsUserEditable,
|
||||
UserAttributeSchema::UserAttributeSchemaIsHardcoded,
|
||||
])
|
||||
.values_panic([
|
||||
"first_name".into(),
|
||||
AttributeType::String.into(),
|
||||
false.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
])
|
||||
.values_panic([
|
||||
"last_name".into(),
|
||||
AttributeType::String.into(),
|
||||
false.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
])
|
||||
.values_panic([
|
||||
"avatar".into(),
|
||||
AttributeType::JpegPhoto.into(),
|
||||
false.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
]),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
{
|
||||
let mut user_statement = Query::insert()
|
||||
.into_table(UserAttributes::Table)
|
||||
.columns([
|
||||
UserAttributes::UserAttributeUserId,
|
||||
UserAttributes::UserAttributeName,
|
||||
UserAttributes::UserAttributeValue,
|
||||
])
|
||||
.to_owned();
|
||||
#[derive(FromQueryResult)]
|
||||
struct FullUserDetails {
|
||||
user_id: UserId,
|
||||
first_name: Option<String>,
|
||||
last_name: Option<String>,
|
||||
avatar: Option<JpegPhoto>,
|
||||
}
|
||||
let mut any_user = false;
|
||||
for user in FullUserDetails::find_by_statement(builder.build(
|
||||
Query::select().from(Users::Table).columns([
|
||||
Users::UserId,
|
||||
Users::FirstName,
|
||||
Users::LastName,
|
||||
Users::Avatar,
|
||||
]),
|
||||
))
|
||||
.all(&transaction)
|
||||
.await?
|
||||
{
|
||||
if let Some(name) = &user.first_name {
|
||||
any_user = true;
|
||||
user_statement.values_panic([
|
||||
user.user_id.clone().into(),
|
||||
"first_name".into(),
|
||||
Serialized::from(name).into(),
|
||||
]);
|
||||
}
|
||||
if let Some(name) = &user.last_name {
|
||||
any_user = true;
|
||||
user_statement.values_panic([
|
||||
user.user_id.clone().into(),
|
||||
"last_name".into(),
|
||||
Serialized::from(name).into(),
|
||||
]);
|
||||
}
|
||||
if let Some(avatar) = &user.avatar {
|
||||
any_user = true;
|
||||
user_statement.values_panic([
|
||||
user.user_id.clone().into(),
|
||||
"avatar".into(),
|
||||
Serialized::from(avatar).into(),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
if any_user {
|
||||
transaction.execute(builder.build(&user_statement)).await?;
|
||||
}
|
||||
}
|
||||
|
||||
for column in [Users::FirstName, Users::LastName, Users::Avatar] {
|
||||
transaction
|
||||
.execute(builder.build(Table::alter().table(Users::Table).drop_column(column)))
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
// This is needed to make an array of async functions.
|
||||
macro_rules! to_sync {
|
||||
($l:ident) => {
|
||||
|pool| -> std::pin::Pin<Box<dyn std::future::Future<Output = anyhow::Result<()>>>> {
|
||||
Box::pin($l(pool))
|
||||
}
|
||||
move |transaction| -> std::pin::Pin<
|
||||
Box<dyn std::future::Future<Output = Result<DatabaseTransaction, DbErr>>>,
|
||||
> { Box::pin($l(transaction)) }
|
||||
};
|
||||
}
|
||||
|
||||
@@ -579,21 +899,26 @@ pub async fn migrate_from_version(
|
||||
to_sync!(migrate_to_v2),
|
||||
to_sync!(migrate_to_v3),
|
||||
to_sync!(migrate_to_v4),
|
||||
to_sync!(migrate_to_v5),
|
||||
];
|
||||
for migration in 2..=4 {
|
||||
assert_eq!(migrations.len(), (LAST_SCHEMA_VERSION.0 - 1) as usize);
|
||||
for migration in 2..=last_version.0 {
|
||||
if version < SchemaVersion(migration) && SchemaVersion(migration) <= last_version {
|
||||
info!("Upgrading DB schema to version {}", migration);
|
||||
migrations[(migration - 2) as usize](pool).await?;
|
||||
let transaction = pool.begin().await?;
|
||||
let transaction = migrations[(migration - 2) as usize](transaction).await?;
|
||||
let builder = transaction.get_database_backend();
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Query::update()
|
||||
.table(Metadata::Table)
|
||||
.value(Metadata::Version, Value::from(migration)),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
transaction.commit().await?;
|
||||
}
|
||||
}
|
||||
let builder = pool.get_database_backend();
|
||||
pool.execute(
|
||||
builder.build(
|
||||
Query::update()
|
||||
.table(Metadata::Table)
|
||||
.value(Metadata::Version, Value::from(last_version)),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@ use tracing::{debug, instrument};
|
||||
|
||||
type SqlOpaqueHandler = SqlBackendHandler;
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
#[instrument(skip_all, level = "debug", err, fields(username = %username.as_str()))]
|
||||
fn passwords_match(
|
||||
password_file_bytes: &[u8],
|
||||
clear_password: &str,
|
||||
@@ -49,7 +49,7 @@ impl SqlBackendHandler {
|
||||
)?)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
#[instrument(skip(self), level = "debug", err)]
|
||||
async fn get_password_file_for_user(&self, user_id: UserId) -> Result<Option<Vec<u8>>> {
|
||||
// Fetch the previously registered password file from the DB.
|
||||
Ok(model::User::find_by_id(user_id)
|
||||
@@ -201,7 +201,7 @@ impl OpaqueHandler for SqlOpaqueHandler {
|
||||
}
|
||||
|
||||
/// Convenience function to set a user's password.
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
#[instrument(skip_all, level = "debug", err, fields(username = %username.as_str()))]
|
||||
pub(crate) async fn register_password(
|
||||
opaque_handler: &SqlOpaqueHandler,
|
||||
username: &UserId,
|
||||
@@ -210,7 +210,7 @@ pub(crate) async fn register_password(
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
use registration::*;
|
||||
let registration_start =
|
||||
opaque::client::registration::start_registration(password.unsecure(), &mut rng)?;
|
||||
opaque::client::registration::start_registration(password.unsecure().as_bytes(), &mut rng)?;
|
||||
let start_response = opaque_handler
|
||||
.registration_start(ClientRegistrationStartRequest {
|
||||
username: username.to_string(),
|
||||
|
||||
96
server/src/domain/sql_schema_backend_handler.rs
Normal file
96
server/src/domain/sql_schema_backend_handler.rs
Normal file
@@ -0,0 +1,96 @@
|
||||
use crate::domain::{
|
||||
error::Result,
|
||||
handler::{AttributeSchema, Schema, SchemaBackendHandler},
|
||||
model,
|
||||
sql_backend_handler::SqlBackendHandler,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use sea_orm::{EntityTrait, QueryOrder};
|
||||
|
||||
use super::handler::AttributeList;
|
||||
|
||||
#[async_trait]
|
||||
impl SchemaBackendHandler for SqlBackendHandler {
|
||||
async fn get_schema(&self) -> Result<Schema> {
|
||||
Ok(Schema {
|
||||
user_attributes: AttributeList {
|
||||
attributes: self.get_user_attributes().await?,
|
||||
},
|
||||
group_attributes: AttributeList {
|
||||
attributes: self.get_group_attributes().await?,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl SqlBackendHandler {
|
||||
async fn get_user_attributes(&self) -> Result<Vec<AttributeSchema>> {
|
||||
Ok(model::UserAttributeSchema::find()
|
||||
.order_by_asc(model::UserAttributeSchemaColumn::AttributeName)
|
||||
.all(&self.sql_pool)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|m| m.into())
|
||||
.collect())
|
||||
}
|
||||
|
||||
async fn get_group_attributes(&self) -> Result<Vec<AttributeSchema>> {
|
||||
Ok(model::GroupAttributeSchema::find()
|
||||
.order_by_asc(model::GroupAttributeSchemaColumn::AttributeName)
|
||||
.all(&self.sql_pool)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|m| m.into())
|
||||
.collect())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::domain::{
|
||||
handler::AttributeList, sql_backend_handler::tests::*, types::AttributeType,
|
||||
};
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_default_schema() {
|
||||
let fixture = TestFixture::new().await;
|
||||
assert_eq!(
|
||||
fixture.handler.get_schema().await.unwrap(),
|
||||
Schema {
|
||||
user_attributes: AttributeList {
|
||||
attributes: vec![
|
||||
AttributeSchema {
|
||||
name: "avatar".to_owned(),
|
||||
attribute_type: AttributeType::JpegPhoto,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "first_name".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "last_name".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
}
|
||||
]
|
||||
},
|
||||
group_attributes: AttributeList {
|
||||
attributes: Vec::new()
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
}
|
||||
@@ -1,27 +1,12 @@
|
||||
use super::sql_migrations::{get_schema_version, migrate_from_version, upgrade_to_v1};
|
||||
use sea_orm::Value;
|
||||
use sea_orm::{DeriveValueType, QueryResult, Value};
|
||||
|
||||
pub type DbConnection = sea_orm::DatabaseConnection;
|
||||
|
||||
#[derive(Copy, PartialEq, Eq, Debug, Clone, PartialOrd, Ord)]
|
||||
#[derive(Copy, PartialEq, Eq, Debug, Clone, PartialOrd, Ord, DeriveValueType)]
|
||||
pub struct SchemaVersion(pub i16);
|
||||
|
||||
impl sea_orm::TryGetable for SchemaVersion {
|
||||
fn try_get_by<I: sea_orm::ColIdx>(
|
||||
res: &sea_orm::QueryResult,
|
||||
index: I,
|
||||
) -> Result<Self, sea_orm::TryGetError> {
|
||||
Ok(SchemaVersion(i16::try_get_by(res, index)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<SchemaVersion> for Value {
|
||||
fn from(version: SchemaVersion) -> Self {
|
||||
version.0.into()
|
||||
}
|
||||
}
|
||||
|
||||
const LAST_SCHEMA_VERSION: SchemaVersion = SchemaVersion(4);
|
||||
pub const LAST_SCHEMA_VERSION: SchemaVersion = SchemaVersion(5);
|
||||
|
||||
pub async fn init_table(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
let version = {
|
||||
@@ -40,8 +25,9 @@ pub async fn init_table(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
mod tests {
|
||||
use crate::domain::{
|
||||
sql_migrations,
|
||||
types::{GroupId, Uuid},
|
||||
types::{GroupId, JpegPhoto, Serialized, Uuid},
|
||||
};
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
use super::*;
|
||||
use chrono::prelude::*;
|
||||
@@ -62,10 +48,22 @@ mod tests {
|
||||
async fn test_init_table() {
|
||||
let sql_pool = get_in_memory_db().await;
|
||||
init_table(&sql_pool).await.unwrap();
|
||||
sql_pool.execute(raw_statement(
|
||||
r#"INSERT INTO users
|
||||
(user_id, email, display_name, first_name, last_name, creation_date, password_hash, uuid)
|
||||
VALUES ("bôb", "böb@bob.bob", "Bob Bobbersön", "Bob", "Bobberson", "1970-01-01 00:00:00", "bob00", "abc")"#)).await.unwrap();
|
||||
sql_pool
|
||||
.execute(raw_statement(
|
||||
r#"INSERT INTO users
|
||||
(user_id, email, display_name, creation_date, password_hash, uuid)
|
||||
VALUES ("bôb", "böb@bob.bob", "Bob Bobbersön", "1970-01-01 00:00:00", "bob00", "abc")"#,
|
||||
))
|
||||
.await
|
||||
.unwrap();
|
||||
sql_pool
|
||||
.execute(raw_statement(
|
||||
r#"INSERT INTO user_attributes
|
||||
(user_attribute_user_id, user_attribute_name, user_attribute_value)
|
||||
VALUES ("bôb", "first_name", "Bob")"#,
|
||||
))
|
||||
.await
|
||||
.unwrap();
|
||||
#[derive(FromQueryResult, PartialEq, Eq, Debug)]
|
||||
struct ShortUserDetails {
|
||||
display_name: String,
|
||||
@@ -97,11 +95,12 @@ mod tests {
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_migrate_tables() {
|
||||
crate::infra::logging::init_for_tests();
|
||||
// Test that we add the column creation_date to groups and uuid to users and groups.
|
||||
let sql_pool = get_in_memory_db().await;
|
||||
sql_pool
|
||||
.execute(raw_statement(
|
||||
r#"CREATE TABLE users ( user_id TEXT, display_name TEXT, first_name TEXT NOT NULL, last_name TEXT, avatar BLOB, creation_date TEXT, email TEXT);"#,
|
||||
r#"CREATE TABLE users ( user_id TEXT PRIMARY KEY, display_name TEXT, first_name TEXT NOT NULL, last_name TEXT, avatar BLOB, creation_date TEXT, email TEXT);"#,
|
||||
))
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -143,12 +142,11 @@ mod tests {
|
||||
#[derive(FromQueryResult, PartialEq, Eq, Debug)]
|
||||
struct SimpleUser {
|
||||
display_name: Option<String>,
|
||||
first_name: Option<String>,
|
||||
uuid: Uuid,
|
||||
}
|
||||
assert_eq!(
|
||||
SimpleUser::find_by_statement(raw_statement(
|
||||
r#"SELECT display_name, first_name, uuid FROM users ORDER BY display_name"#
|
||||
r#"SELECT display_name, uuid FROM users ORDER BY display_name"#
|
||||
))
|
||||
.all(&sql_pool)
|
||||
.await
|
||||
@@ -156,17 +154,36 @@ mod tests {
|
||||
vec![
|
||||
SimpleUser {
|
||||
display_name: None,
|
||||
first_name: None,
|
||||
uuid: crate::uuid!("a02eaf13-48a7-30f6-a3d4-040ff7c52b04")
|
||||
},
|
||||
SimpleUser {
|
||||
display_name: Some("John Doe".to_owned()),
|
||||
first_name: Some("John".to_owned()),
|
||||
uuid: crate::uuid!("986765a5-3f03-389e-b47b-536b2d6e1bec")
|
||||
}
|
||||
]
|
||||
);
|
||||
#[derive(FromQueryResult, PartialEq, Eq, Debug)]
|
||||
struct UserAttribute {
|
||||
user_attribute_user_id: String,
|
||||
user_attribute_name: String,
|
||||
user_attribute_value: Serialized,
|
||||
}
|
||||
assert_eq!(
|
||||
UserAttribute::find_by_statement(raw_statement(
|
||||
r#"SELECT user_attribute_user_id, user_attribute_name, user_attribute_value FROM user_attributes ORDER BY user_attribute_user_id, user_attribute_value"#
|
||||
))
|
||||
.all(&sql_pool)
|
||||
.await
|
||||
.unwrap(),
|
||||
vec![
|
||||
UserAttribute {
|
||||
user_attribute_user_id: "john".to_owned(),
|
||||
user_attribute_name: "first_name".to_owned(),
|
||||
user_attribute_value: Serialized::from("John"),
|
||||
}
|
||||
]
|
||||
);
|
||||
#[derive(FromQueryResult, PartialEq, Eq, Debug)]
|
||||
struct ShortGroupDetails {
|
||||
group_id: GroupId,
|
||||
display_name: String,
|
||||
@@ -270,6 +287,92 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_migration_to_v5() {
|
||||
crate::infra::logging::init_for_tests();
|
||||
let sql_pool = get_in_memory_db().await;
|
||||
upgrade_to_v1(&sql_pool).await.unwrap();
|
||||
migrate_from_version(&sql_pool, SchemaVersion(1), SchemaVersion(4))
|
||||
.await
|
||||
.unwrap();
|
||||
sql_pool
|
||||
.execute(raw_statement(
|
||||
r#"INSERT INTO users (user_id, email, creation_date, uuid)
|
||||
VALUES ("bob", "bob@bob.com", "1970-01-01 00:00:00", "a02eaf13-48a7-30f6-a3d4-040ff7c52b04")"#,
|
||||
))
|
||||
.await
|
||||
.unwrap();
|
||||
sql_pool
|
||||
.execute(sea_orm::Statement::from_sql_and_values(DbBackend::Sqlite,
|
||||
r#"INSERT INTO users (user_id, email, display_name, first_name, last_name, avatar, creation_date, uuid)
|
||||
VALUES ("bob2", "bob2@bob.com", "display bob", "first bob", "last bob", $1, "1970-01-01 00:00:00", "986765a5-3f03-389e-b47b-536b2d6e1bec")"#, [JpegPhoto::for_tests().into()]),
|
||||
)
|
||||
.await
|
||||
.unwrap();
|
||||
migrate_from_version(&sql_pool, SchemaVersion(4), SchemaVersion(5))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
sql_migrations::JustSchemaVersion::find_by_statement(raw_statement(
|
||||
r#"SELECT version FROM metadata"#
|
||||
))
|
||||
.one(&sql_pool)
|
||||
.await
|
||||
.unwrap()
|
||||
.unwrap(),
|
||||
sql_migrations::JustSchemaVersion {
|
||||
version: SchemaVersion(5)
|
||||
}
|
||||
);
|
||||
#[derive(FromQueryResult, PartialEq, Eq, Debug)]
|
||||
pub struct UserV5 {
|
||||
user_id: String,
|
||||
email: String,
|
||||
display_name: Option<String>,
|
||||
}
|
||||
assert_eq!(
|
||||
UserV5::find_by_statement(raw_statement(
|
||||
r#"SELECT user_id, email, display_name FROM users ORDER BY user_id ASC"#
|
||||
))
|
||||
.all(&sql_pool)
|
||||
.await
|
||||
.unwrap(),
|
||||
vec![
|
||||
UserV5 {
|
||||
user_id: "bob".to_owned(),
|
||||
email: "bob@bob.com".to_owned(),
|
||||
display_name: None
|
||||
},
|
||||
UserV5 {
|
||||
user_id: "bob2".to_owned(),
|
||||
email: "bob2@bob.com".to_owned(),
|
||||
display_name: Some("display bob".to_owned())
|
||||
},
|
||||
]
|
||||
);
|
||||
sql_pool
|
||||
.execute(raw_statement(r#"SELECT first_name FROM users"#))
|
||||
.await
|
||||
.unwrap_err();
|
||||
#[derive(FromQueryResult, PartialEq, Eq, Debug)]
|
||||
pub struct UserAttribute {
|
||||
user_attribute_user_id: String,
|
||||
user_attribute_name: String,
|
||||
user_attribute_value: Serialized,
|
||||
}
|
||||
assert_eq!(
|
||||
UserAttribute::find_by_statement(raw_statement(r#"SELECT * FROM user_attributes ORDER BY user_attribute_user_id, user_attribute_name ASC"#))
|
||||
.all(&sql_pool)
|
||||
.await
|
||||
.unwrap(),
|
||||
vec![
|
||||
UserAttribute { user_attribute_user_id: "bob2".to_string(), user_attribute_name: "avatar".to_owned(), user_attribute_value: Serialized::from(&JpegPhoto::for_tests()) },
|
||||
UserAttribute { user_attribute_user_id: "bob2".to_string(), user_attribute_name: "first_name".to_owned(), user_attribute_value: Serialized::from("first bob") },
|
||||
UserAttribute { user_attribute_user_id: "bob2".to_string(), user_attribute_name: "last_name".to_owned(), user_attribute_value: Serialized::from("last bob") },
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_too_high_version() {
|
||||
let sql_pool = get_in_memory_db().await;
|
||||
|
||||
@@ -6,17 +6,31 @@ use crate::domain::{
|
||||
},
|
||||
model::{self, GroupColumn, UserColumn},
|
||||
sql_backend_handler::SqlBackendHandler,
|
||||
types::{GroupDetails, GroupId, User, UserAndGroups, UserId, Uuid},
|
||||
types::{AttributeValue, GroupDetails, GroupId, Serialized, User, UserAndGroups, UserId, Uuid},
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use sea_orm::{
|
||||
entity::IntoActiveValue,
|
||||
sea_query::{Alias, Cond, Expr, Func, IntoColumnRef, IntoCondition, SimpleExpr},
|
||||
ActiveModelTrait, ActiveValue, ColumnTrait, EntityTrait, ModelTrait, QueryFilter, QueryOrder,
|
||||
QuerySelect, QueryTrait, Set,
|
||||
sea_query::{
|
||||
query::OnConflict, Alias, Cond, Expr, Func, IntoColumnRef, IntoCondition, SimpleExpr,
|
||||
},
|
||||
ActiveModelTrait, ActiveValue, ColumnTrait, EntityTrait, IntoActiveValue, ModelTrait,
|
||||
QueryFilter, QueryOrder, QuerySelect, QueryTrait, Set, TransactionTrait,
|
||||
};
|
||||
use std::collections::HashSet;
|
||||
use tracing::{debug, instrument};
|
||||
use tracing::instrument;
|
||||
|
||||
fn attribute_condition(name: String, value: String) -> Cond {
|
||||
Expr::in_subquery(
|
||||
Expr::col(UserColumn::UserId.as_column_ref()),
|
||||
model::UserAttributes::find()
|
||||
.select_only()
|
||||
.column(model::UserAttributesColumn::UserId)
|
||||
.filter(model::UserAttributesColumn::AttributeName.eq(name))
|
||||
.filter(model::UserAttributesColumn::Value.eq(Serialized::from(&value)))
|
||||
.into_query(),
|
||||
)
|
||||
.into_condition()
|
||||
}
|
||||
|
||||
fn get_user_filter_expr(filter: UserRequestFilter) -> Cond {
|
||||
use UserRequestFilter::*;
|
||||
@@ -46,6 +60,7 @@ fn get_user_filter_expr(filter: UserRequestFilter) -> Cond {
|
||||
ColumnTrait::eq(&s1, s2).into_condition()
|
||||
}
|
||||
}
|
||||
AttributeEquality(s1, s2) => attribute_condition(s1, s2),
|
||||
MemberOf(group) => Expr::col((group_table, GroupColumn::DisplayName))
|
||||
.eq(group)
|
||||
.into_condition(),
|
||||
@@ -53,11 +68,13 @@ fn get_user_filter_expr(filter: UserRequestFilter) -> Cond {
|
||||
.eq(group_id)
|
||||
.into_condition(),
|
||||
UserIdSubString(filter) => UserColumn::UserId
|
||||
.like(&filter.to_sql_filter())
|
||||
.into_condition(),
|
||||
SubString(col, filter) => SimpleExpr::FunctionCall(Func::lower(Expr::col(col)))
|
||||
.like(filter.to_sql_filter())
|
||||
.into_condition(),
|
||||
SubString(col, filter) => {
|
||||
SimpleExpr::FunctionCall(Func::lower(Expr::col(col.as_column_ref())))
|
||||
.like(filter.to_sql_filter())
|
||||
.into_condition()
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -74,14 +91,14 @@ fn to_value(opt_name: &Option<String>) -> ActiveValue<Option<String>> {
|
||||
|
||||
#[async_trait]
|
||||
impl UserListerBackendHandler for SqlBackendHandler {
|
||||
#[instrument(skip_all, level = "debug", ret, err)]
|
||||
#[instrument(skip(self), level = "debug", ret, err)]
|
||||
async fn list_users(
|
||||
&self,
|
||||
filters: Option<UserRequestFilter>,
|
||||
get_groups: bool,
|
||||
// To simplify the query, we always fetch groups. TODO: cleanup.
|
||||
_get_groups: bool,
|
||||
) -> Result<Vec<UserAndGroups>> {
|
||||
debug!(?filters);
|
||||
let query = model::User::find()
|
||||
let mut users: Vec<_> = model::User::find()
|
||||
.filter(
|
||||
filters
|
||||
.map(|f| {
|
||||
@@ -98,114 +115,203 @@ impl UserListerBackendHandler for SqlBackendHandler {
|
||||
})
|
||||
.unwrap_or_else(|| SimpleExpr::Value(true.into()).into_condition()),
|
||||
)
|
||||
.order_by_asc(UserColumn::UserId);
|
||||
if !get_groups {
|
||||
Ok(query
|
||||
.into_model::<User>()
|
||||
.all(&self.sql_pool)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|u| UserAndGroups {
|
||||
user: u,
|
||||
groups: None,
|
||||
})
|
||||
.collect())
|
||||
} else {
|
||||
let results = query
|
||||
//find_with_linked?
|
||||
.find_also_linked(model::memberships::UserToGroup)
|
||||
.order_by_asc(SimpleExpr::Column(
|
||||
(Alias::new("r1"), GroupColumn::GroupId).into_column_ref(),
|
||||
))
|
||||
.all(&self.sql_pool)
|
||||
.await?;
|
||||
use itertools::Itertools;
|
||||
Ok(results
|
||||
.iter()
|
||||
.group_by(|(u, _)| u)
|
||||
.into_iter()
|
||||
.map(|(user, groups)| {
|
||||
let groups: Vec<_> = groups
|
||||
.into_iter()
|
||||
.flat_map(|(_, g)| g)
|
||||
.map(|g| GroupDetails::from(g.clone()))
|
||||
.collect();
|
||||
UserAndGroups {
|
||||
user: user.clone().into(),
|
||||
groups: Some(groups),
|
||||
}
|
||||
})
|
||||
.collect())
|
||||
.order_by_asc(UserColumn::UserId)
|
||||
.find_with_linked(model::memberships::UserToGroup)
|
||||
.order_by_asc(SimpleExpr::Column(
|
||||
(Alias::new("r1"), GroupColumn::DisplayName).into_column_ref(),
|
||||
))
|
||||
.all(&self.sql_pool)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(|(user, groups)| UserAndGroups {
|
||||
user: user.into(),
|
||||
groups: Some(groups.into_iter().map(Into::<GroupDetails>::into).collect()),
|
||||
})
|
||||
.collect();
|
||||
// At this point, the users don't have attributes, we need to populate it with another query.
|
||||
let user_ids = users.iter().map(|u| &u.user.user_id);
|
||||
let attributes = model::UserAttributes::find()
|
||||
.filter(model::UserAttributesColumn::UserId.is_in(user_ids))
|
||||
.order_by_asc(model::UserAttributesColumn::UserId)
|
||||
.order_by_asc(model::UserAttributesColumn::AttributeName)
|
||||
.all(&self.sql_pool)
|
||||
.await?;
|
||||
let mut attributes_iter = attributes.into_iter().peekable();
|
||||
use itertools::Itertools; // For take_while_ref
|
||||
for user in users.iter_mut() {
|
||||
assert!(attributes_iter
|
||||
.peek()
|
||||
.map(|u| u.user_id >= user.user.user_id)
|
||||
.unwrap_or(true),
|
||||
"Attributes are not sorted, users are not sorted, or previous user didn't consume all the attributes");
|
||||
|
||||
user.user.attributes = attributes_iter
|
||||
.take_while_ref(|u| u.user_id == user.user.user_id)
|
||||
.map(AttributeValue::from)
|
||||
.collect();
|
||||
}
|
||||
Ok(users)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl UserBackendHandler for SqlBackendHandler {
|
||||
#[instrument(skip_all, level = "debug", ret)]
|
||||
#[instrument(skip_all, level = "debug", ret, fields(user_id = ?user_id.as_str()))]
|
||||
async fn get_user_details(&self, user_id: &UserId) -> Result<User> {
|
||||
debug!(?user_id);
|
||||
model::User::find_by_id(user_id.to_owned())
|
||||
.into_model::<User>()
|
||||
.one(&self.sql_pool)
|
||||
.await?
|
||||
.ok_or_else(|| DomainError::EntityNotFound(user_id.to_string()))
|
||||
let mut user = User::from(
|
||||
model::User::find_by_id(user_id.to_owned())
|
||||
.one(&self.sql_pool)
|
||||
.await?
|
||||
.ok_or_else(|| DomainError::EntityNotFound(user_id.to_string()))?,
|
||||
);
|
||||
let attributes = model::UserAttributes::find()
|
||||
.filter(model::UserAttributesColumn::UserId.eq(user_id))
|
||||
.order_by_asc(model::UserAttributesColumn::AttributeName)
|
||||
.all(&self.sql_pool)
|
||||
.await?;
|
||||
user.attributes = attributes.into_iter().map(AttributeValue::from).collect();
|
||||
Ok(user)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", ret, err)]
|
||||
#[instrument(skip_all, level = "debug", ret, err, fields(user_id = ?user_id.as_str()))]
|
||||
async fn get_user_groups(&self, user_id: &UserId) -> Result<HashSet<GroupDetails>> {
|
||||
debug!(?user_id);
|
||||
let user = model::User::find_by_id(user_id.to_owned())
|
||||
.one(&self.sql_pool)
|
||||
.await?
|
||||
.ok_or_else(|| DomainError::EntityNotFound(user_id.to_string()))?;
|
||||
Ok(HashSet::from_iter(
|
||||
user.find_linked(model::memberships::UserToGroup)
|
||||
.into_model::<GroupDetails>()
|
||||
.all(&self.sql_pool)
|
||||
.await?,
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(Into::<GroupDetails>::into),
|
||||
))
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
#[instrument(skip(self), level = "debug", err, fields(user_id = ?request.user_id.as_str()))]
|
||||
async fn create_user(&self, request: CreateUserRequest) -> Result<()> {
|
||||
debug!(user_id = ?request.user_id);
|
||||
let now = chrono::Utc::now().naive_utc();
|
||||
let uuid = Uuid::from_name_and_date(request.user_id.as_str(), &now);
|
||||
let new_user = model::users::ActiveModel {
|
||||
user_id: Set(request.user_id),
|
||||
user_id: Set(request.user_id.clone()),
|
||||
email: Set(request.email),
|
||||
display_name: to_value(&request.display_name),
|
||||
first_name: to_value(&request.first_name),
|
||||
last_name: to_value(&request.last_name),
|
||||
avatar: request.avatar.into_active_value(),
|
||||
creation_date: ActiveValue::Set(now),
|
||||
uuid: ActiveValue::Set(uuid),
|
||||
..Default::default()
|
||||
};
|
||||
new_user.insert(&self.sql_pool).await?;
|
||||
let mut new_user_attributes = Vec::new();
|
||||
if let Some(first_name) = request.first_name {
|
||||
new_user_attributes.push(model::user_attributes::ActiveModel {
|
||||
user_id: Set(request.user_id.clone()),
|
||||
attribute_name: Set("first_name".to_owned()),
|
||||
value: Set(Serialized::from(&first_name)),
|
||||
});
|
||||
}
|
||||
if let Some(last_name) = request.last_name {
|
||||
new_user_attributes.push(model::user_attributes::ActiveModel {
|
||||
user_id: Set(request.user_id.clone()),
|
||||
attribute_name: Set("last_name".to_owned()),
|
||||
value: Set(Serialized::from(&last_name)),
|
||||
});
|
||||
}
|
||||
if let Some(avatar) = request.avatar {
|
||||
new_user_attributes.push(model::user_attributes::ActiveModel {
|
||||
user_id: Set(request.user_id),
|
||||
attribute_name: Set("avatar".to_owned()),
|
||||
value: Set(Serialized::from(&avatar)),
|
||||
});
|
||||
}
|
||||
self.sql_pool
|
||||
.transaction::<_, (), DomainError>(|transaction| {
|
||||
Box::pin(async move {
|
||||
new_user.insert(transaction).await?;
|
||||
if !new_user_attributes.is_empty() {
|
||||
model::UserAttributes::insert_many(new_user_attributes)
|
||||
.exec(transaction)
|
||||
.await?;
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
#[instrument(skip(self), level = "debug", err, fields(user_id = ?request.user_id.as_str()))]
|
||||
async fn update_user(&self, request: UpdateUserRequest) -> Result<()> {
|
||||
debug!(user_id = ?request.user_id);
|
||||
let update_user = model::users::ActiveModel {
|
||||
user_id: ActiveValue::Set(request.user_id),
|
||||
user_id: ActiveValue::Set(request.user_id.clone()),
|
||||
email: request.email.map(ActiveValue::Set).unwrap_or_default(),
|
||||
display_name: to_value(&request.display_name),
|
||||
first_name: to_value(&request.first_name),
|
||||
last_name: to_value(&request.last_name),
|
||||
avatar: request.avatar.into_active_value(),
|
||||
..Default::default()
|
||||
};
|
||||
update_user.update(&self.sql_pool).await?;
|
||||
let mut update_user_attributes = Vec::new();
|
||||
let mut remove_user_attributes = Vec::new();
|
||||
let to_serialized_value = |s: &Option<String>| match s.as_ref().map(|s| s.as_str()) {
|
||||
None => None,
|
||||
Some("") => Some(ActiveValue::NotSet),
|
||||
Some(s) => Some(ActiveValue::Set(Serialized::from(s))),
|
||||
};
|
||||
let mut process_serialized =
|
||||
|value: ActiveValue<Serialized>, attribute_name: &str| match &value {
|
||||
ActiveValue::NotSet => {
|
||||
remove_user_attributes.push(attribute_name.to_owned());
|
||||
}
|
||||
ActiveValue::Set(_) => {
|
||||
update_user_attributes.push(model::user_attributes::ActiveModel {
|
||||
user_id: Set(request.user_id.clone()),
|
||||
attribute_name: Set(attribute_name.to_owned()),
|
||||
value,
|
||||
})
|
||||
}
|
||||
_ => unreachable!(),
|
||||
};
|
||||
if let Some(value) = to_serialized_value(&request.first_name) {
|
||||
process_serialized(value, "first_name");
|
||||
}
|
||||
if let Some(value) = to_serialized_value(&request.last_name) {
|
||||
process_serialized(value, "last_name");
|
||||
}
|
||||
if let Some(avatar) = request.avatar {
|
||||
process_serialized(avatar.into_active_value(), "avatar");
|
||||
}
|
||||
self.sql_pool
|
||||
.transaction::<_, (), DomainError>(|transaction| {
|
||||
Box::pin(async move {
|
||||
update_user.update(transaction).await?;
|
||||
if !update_user_attributes.is_empty() {
|
||||
model::UserAttributes::insert_many(update_user_attributes)
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
model::UserAttributesColumn::UserId,
|
||||
model::UserAttributesColumn::AttributeName,
|
||||
])
|
||||
.update_column(model::UserAttributesColumn::Value)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(transaction)
|
||||
.await?;
|
||||
}
|
||||
if !remove_user_attributes.is_empty() {
|
||||
model::UserAttributes::delete_many()
|
||||
.filter(model::UserAttributesColumn::UserId.eq(&request.user_id))
|
||||
.filter(
|
||||
model::UserAttributesColumn::AttributeName
|
||||
.is_in(remove_user_attributes),
|
||||
)
|
||||
.exec(transaction)
|
||||
.await?;
|
||||
}
|
||||
Ok(())
|
||||
})
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
#[instrument(skip_all, level = "debug", err, fields(user_id = ?user_id.as_str()))]
|
||||
async fn delete_user(&self, user_id: &UserId) -> Result<()> {
|
||||
debug!(?user_id);
|
||||
let res = model::User::delete_by_id(user_id.clone())
|
||||
.exec(&self.sql_pool)
|
||||
.await?;
|
||||
@@ -218,9 +324,8 @@ impl UserBackendHandler for SqlBackendHandler {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
#[instrument(skip_all, level = "debug", err, fields(user_id = ?user_id.as_str(), group_id))]
|
||||
async fn add_user_to_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()> {
|
||||
debug!(?user_id, ?group_id);
|
||||
let new_membership = model::memberships::ActiveModel {
|
||||
user_id: ActiveValue::Set(user_id.clone()),
|
||||
group_id: ActiveValue::Set(group_id),
|
||||
@@ -229,9 +334,8 @@ impl UserBackendHandler for SqlBackendHandler {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
#[instrument(skip_all, level = "debug", err, fields(user_id = ?user_id.as_str(), group_id))]
|
||||
async fn remove_user_from_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()> {
|
||||
debug!(?user_id, ?group_id);
|
||||
let res = model::Membership::delete_by_id((user_id.clone(), group_id))
|
||||
.exec(&self.sql_pool)
|
||||
.await?;
|
||||
@@ -253,6 +357,7 @@ mod tests {
|
||||
sql_backend_handler::tests::*,
|
||||
types::{JpegPhoto, UserColumn},
|
||||
};
|
||||
use pretty_assertions::{assert_eq, assert_ne};
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_list_users_no_filter() {
|
||||
@@ -291,8 +396,8 @@ mod tests {
|
||||
let fixture = TestFixture::new().await;
|
||||
let users = get_user_names(
|
||||
&fixture.handler,
|
||||
Some(UserRequestFilter::Equality(
|
||||
UserColumn::FirstName,
|
||||
Some(UserRequestFilter::AttributeEquality(
|
||||
"first_name".to_string(),
|
||||
"first bob".to_string(),
|
||||
)),
|
||||
)
|
||||
@@ -312,10 +417,10 @@ mod tests {
|
||||
final_: Some("K".to_owned()),
|
||||
}),
|
||||
UserRequestFilter::SubString(
|
||||
UserColumn::FirstName,
|
||||
UserColumn::DisplayName,
|
||||
SubStringFilter {
|
||||
initial: None,
|
||||
any: vec!["r".to_owned(), "t".to_owned()],
|
||||
any: vec!["t".to_owned(), "r".to_owned()],
|
||||
final_: None,
|
||||
},
|
||||
),
|
||||
@@ -620,9 +725,23 @@ mod tests {
|
||||
.unwrap();
|
||||
assert_eq!(user.email, "email");
|
||||
assert_eq!(user.display_name.unwrap(), "display_name");
|
||||
assert_eq!(user.first_name.unwrap(), "first_name");
|
||||
assert_eq!(user.last_name.unwrap(), "last_name");
|
||||
assert_eq!(user.avatar, Some(JpegPhoto::for_tests()));
|
||||
assert_eq!(
|
||||
user.attributes,
|
||||
vec![
|
||||
AttributeValue {
|
||||
name: "avatar".to_owned(),
|
||||
value: Serialized::from(&JpegPhoto::for_tests())
|
||||
},
|
||||
AttributeValue {
|
||||
name: "first_name".to_owned(),
|
||||
value: Serialized::from("first_name")
|
||||
},
|
||||
AttributeValue {
|
||||
name: "last_name".to_owned(),
|
||||
value: Serialized::from("last_name")
|
||||
}
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
@@ -633,8 +752,9 @@ mod tests {
|
||||
.handler
|
||||
.update_user(UpdateUserRequest {
|
||||
user_id: UserId::new("bob"),
|
||||
first_name: Some("first_name".to_string()),
|
||||
first_name: None,
|
||||
last_name: Some(String::new()),
|
||||
avatar: Some(JpegPhoto::for_tests()),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
@@ -646,9 +766,104 @@ mod tests {
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(user.display_name.unwrap(), "display bob");
|
||||
assert_eq!(user.first_name.unwrap(), "first_name");
|
||||
assert_eq!(user.last_name, None);
|
||||
assert_eq!(user.avatar, None);
|
||||
assert_eq!(
|
||||
user.attributes,
|
||||
vec![
|
||||
AttributeValue {
|
||||
name: "avatar".to_owned(),
|
||||
value: Serialized::from(&JpegPhoto::for_tests())
|
||||
},
|
||||
AttributeValue {
|
||||
name: "first_name".to_owned(),
|
||||
value: Serialized::from("first bob")
|
||||
}
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_update_user_delete_avatar() {
|
||||
let fixture = TestFixture::new().await;
|
||||
|
||||
fixture
|
||||
.handler
|
||||
.update_user(UpdateUserRequest {
|
||||
user_id: UserId::new("bob"),
|
||||
avatar: Some(JpegPhoto::for_tests()),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let user = fixture
|
||||
.handler
|
||||
.get_user_details(&UserId::new("bob"))
|
||||
.await
|
||||
.unwrap();
|
||||
let avatar = AttributeValue {
|
||||
name: "avatar".to_owned(),
|
||||
value: Serialized::from(&JpegPhoto::for_tests()),
|
||||
};
|
||||
assert!(user.attributes.contains(&avatar));
|
||||
fixture
|
||||
.handler
|
||||
.update_user(UpdateUserRequest {
|
||||
user_id: UserId::new("bob"),
|
||||
avatar: Some(JpegPhoto::null()),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let user = fixture
|
||||
.handler
|
||||
.get_user_details(&UserId::new("bob"))
|
||||
.await
|
||||
.unwrap();
|
||||
assert!(!user.attributes.contains(&avatar));
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_create_user_all_values() {
|
||||
let fixture = TestFixture::new().await;
|
||||
|
||||
fixture
|
||||
.handler
|
||||
.create_user(CreateUserRequest {
|
||||
user_id: UserId::new("james"),
|
||||
email: "email".to_string(),
|
||||
display_name: Some("display_name".to_string()),
|
||||
first_name: Some("first_name".to_string()),
|
||||
last_name: Some("last_name".to_string()),
|
||||
avatar: Some(JpegPhoto::for_tests()),
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
let user = fixture
|
||||
.handler
|
||||
.get_user_details(&UserId::new("james"))
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(user.email, "email");
|
||||
assert_eq!(user.display_name.unwrap(), "display_name");
|
||||
assert_eq!(
|
||||
user.attributes,
|
||||
vec![
|
||||
AttributeValue {
|
||||
name: "avatar".to_owned(),
|
||||
value: Serialized::from(&JpegPhoto::for_tests())
|
||||
},
|
||||
AttributeValue {
|
||||
name: "first_name".to_owned(),
|
||||
value: Serialized::from("first_name")
|
||||
},
|
||||
AttributeValue {
|
||||
name: "last_name".to_owned(),
|
||||
value: Serialized::from("last_name")
|
||||
}
|
||||
]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
@@ -670,4 +885,32 @@ mod tests {
|
||||
vec!["patrick"]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_delete_user_not_found() {
|
||||
let fixture = TestFixture::new().await;
|
||||
|
||||
fixture
|
||||
.handler
|
||||
.delete_user(&UserId::new("not found"))
|
||||
.await
|
||||
.expect_err("Should have failed");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_remove_user_from_group_not_found() {
|
||||
let fixture = TestFixture::new().await;
|
||||
|
||||
fixture
|
||||
.handler
|
||||
.remove_user_from_group(&UserId::new("not found"), fixture.groups[0])
|
||||
.await
|
||||
.expect_err("Should have failed");
|
||||
|
||||
fixture
|
||||
.handler
|
||||
.remove_user_from_group(&UserId::new("not found"), GroupId(16242))
|
||||
.await
|
||||
.expect_err("Should have failed");
|
||||
}
|
||||
}
|
||||
|
||||
@@ -2,15 +2,17 @@ use base64::Engine;
|
||||
use chrono::{NaiveDateTime, TimeZone};
|
||||
use sea_orm::{
|
||||
entity::IntoActiveValue,
|
||||
sea_query::{value::ValueType, ArrayType, ColumnType, Nullable, ValueTypeErr},
|
||||
DbErr, FromQueryResult, QueryResult, TryFromU64, TryGetError, TryGetable, Value,
|
||||
sea_query::{value::ValueType, ArrayType, BlobSize, ColumnType, Nullable, ValueTypeErr},
|
||||
DbErr, DeriveValueType, QueryResult, TryFromU64, TryGetError, TryGetable, Value,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use strum::{EnumString, IntoStaticStr};
|
||||
|
||||
pub use super::model::{GroupColumn, UserColumn};
|
||||
|
||||
#[derive(PartialEq, Hash, Eq, Clone, Debug, Default, Serialize, Deserialize)]
|
||||
#[derive(PartialEq, Hash, Eq, Clone, Debug, Default, Serialize, Deserialize, DeriveValueType)]
|
||||
#[serde(try_from = "&str")]
|
||||
#[sea_orm(column_type = "String(Some(36))")]
|
||||
pub struct Uuid(String);
|
||||
|
||||
impl Uuid {
|
||||
@@ -53,48 +55,6 @@ impl std::string::ToString for Uuid {
|
||||
}
|
||||
}
|
||||
|
||||
impl TryGetable for Uuid {
|
||||
fn try_get_by<I: sea_orm::ColIdx>(
|
||||
res: &QueryResult,
|
||||
index: I,
|
||||
) -> std::result::Result<Self, TryGetError> {
|
||||
Ok(Uuid(String::try_get_by(res, index)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl ValueType for Uuid {
|
||||
fn try_from(v: Value) -> Result<Self, ValueTypeErr> {
|
||||
<Self as std::convert::TryFrom<_>>::try_from(
|
||||
<std::string::String as sea_orm::sea_query::ValueType>::try_from(v)?.as_str(),
|
||||
)
|
||||
.map_err(|_| ValueTypeErr {})
|
||||
}
|
||||
|
||||
fn type_name() -> String {
|
||||
"Uuid".to_owned()
|
||||
}
|
||||
|
||||
fn array_type() -> ArrayType {
|
||||
ArrayType::String
|
||||
}
|
||||
|
||||
fn column_type() -> ColumnType {
|
||||
ColumnType::String(Some(36))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Uuid> for Value {
|
||||
fn from(uuid: Uuid) -> Self {
|
||||
uuid.as_str().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&Uuid> for Value {
|
||||
fn from(uuid: &Uuid) -> Self {
|
||||
uuid.as_str().into()
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
#[macro_export]
|
||||
macro_rules! uuid {
|
||||
@@ -103,7 +63,66 @@ macro_rules! uuid {
|
||||
};
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone, Debug, Default, Serialize, Deserialize)]
|
||||
#[derive(Clone, PartialEq, Eq, Hash, Serialize, Deserialize, DeriveValueType)]
|
||||
#[sea_orm(column_type = "Binary(BlobSize::Long)", array_type = "Bytes")]
|
||||
pub struct Serialized(Vec<u8>);
|
||||
|
||||
const SERIALIZED_I64_LEN: usize = 8;
|
||||
|
||||
impl std::fmt::Debug for Serialized {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
f.debug_tuple("Serialized")
|
||||
.field(
|
||||
&self
|
||||
.convert_to()
|
||||
.and_then(|s| {
|
||||
String::from_utf8(s)
|
||||
.map_err(|_| Box::new(bincode::ErrorKind::InvalidCharEncoding))
|
||||
})
|
||||
.or_else(|e| {
|
||||
if self.0.len() == SERIALIZED_I64_LEN {
|
||||
self.convert_to::<i64>()
|
||||
.map(|i| i.to_string())
|
||||
.map_err(|_| Box::new(bincode::ErrorKind::InvalidCharEncoding))
|
||||
} else {
|
||||
Err(e)
|
||||
}
|
||||
})
|
||||
.unwrap_or_else(|_| {
|
||||
format!("hash: {:#016X}", {
|
||||
let mut hasher = std::collections::hash_map::DefaultHasher::new();
|
||||
std::hash::Hash::hash(&self.0, &mut hasher);
|
||||
std::hash::Hasher::finish(&hasher)
|
||||
})
|
||||
}),
|
||||
)
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl<'a, T: Serialize + ?Sized> From<&'a T> for Serialized {
|
||||
fn from(t: &'a T) -> Self {
|
||||
Self(bincode::serialize(&t).unwrap())
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialized {
|
||||
fn convert_to<'a, T: Deserialize<'a>>(&'a self) -> bincode::Result<T> {
|
||||
bincode::deserialize(&self.0)
|
||||
}
|
||||
|
||||
pub fn unwrap<'a, T: Deserialize<'a>>(&'a self) -> T {
|
||||
self.convert_to().unwrap()
|
||||
}
|
||||
|
||||
pub fn expect<'a, T: Deserialize<'a>>(&'a self, message: &str) -> T {
|
||||
self.convert_to().expect(message)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(
|
||||
PartialEq, Eq, PartialOrd, Ord, Clone, Debug, Default, Serialize, Deserialize, DeriveValueType,
|
||||
)]
|
||||
#[serde(from = "String")]
|
||||
pub struct UserId(String);
|
||||
|
||||
@@ -133,24 +152,12 @@ impl From<String> for UserId {
|
||||
}
|
||||
}
|
||||
|
||||
impl From<UserId> for Value {
|
||||
fn from(user_id: UserId) -> Self {
|
||||
user_id.into_string().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&UserId> for Value {
|
||||
fn from(user_id: &UserId) -> Self {
|
||||
user_id.as_str().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl TryGetable for UserId {
|
||||
fn try_get_by<I: sea_orm::ColIdx>(res: &QueryResult, index: I) -> Result<Self, TryGetError> {
|
||||
Ok(UserId::new(&String::try_get_by(res, index)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFromU64 for UserId {
|
||||
fn try_from_u64(_n: u64) -> Result<Self, DbErr> {
|
||||
Err(DbErr::ConvertFromU64(
|
||||
@@ -159,33 +166,10 @@ impl TryFromU64 for UserId {
|
||||
}
|
||||
}
|
||||
|
||||
impl ValueType for UserId {
|
||||
fn try_from(v: Value) -> Result<Self, ValueTypeErr> {
|
||||
Ok(UserId::new(<String as ValueType>::try_from(v)?.as_str()))
|
||||
}
|
||||
|
||||
fn type_name() -> String {
|
||||
"UserId".to_owned()
|
||||
}
|
||||
|
||||
fn array_type() -> ArrayType {
|
||||
ArrayType::String
|
||||
}
|
||||
|
||||
fn column_type() -> ColumnType {
|
||||
ColumnType::String(Some(255))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Clone, Debug, Serialize, Deserialize)]
|
||||
#[derive(PartialEq, Eq, Clone, Serialize, Deserialize, DeriveValueType)]
|
||||
#[sea_orm(column_type = "Binary(BlobSize::Long)", array_type = "Bytes")]
|
||||
pub struct JpegPhoto(#[serde(with = "serde_bytes")] Vec<u8>);
|
||||
|
||||
impl From<JpegPhoto> for Value {
|
||||
fn from(photo: JpegPhoto) -> Self {
|
||||
photo.0.into()
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&JpegPhoto> for Value {
|
||||
fn from(photo: &JpegPhoto) -> Self {
|
||||
photo.0.as_slice().into()
|
||||
@@ -237,7 +221,24 @@ impl From<&JpegPhoto> for String {
|
||||
}
|
||||
}
|
||||
|
||||
impl std::fmt::Debug for JpegPhoto {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
let mut encoded = base64::engine::general_purpose::STANDARD.encode(&self.0);
|
||||
if encoded.len() > 100 {
|
||||
encoded.truncate(100);
|
||||
encoded.push_str(" ...");
|
||||
};
|
||||
f.debug_tuple("JpegPhoto")
|
||||
.field(&format!("b64[{}]", encoded))
|
||||
.finish()
|
||||
}
|
||||
}
|
||||
|
||||
impl JpegPhoto {
|
||||
pub fn is_empty(&self) -> bool {
|
||||
self.0.is_empty()
|
||||
}
|
||||
|
||||
pub fn null() -> Self {
|
||||
Self(vec![])
|
||||
}
|
||||
@@ -266,62 +267,36 @@ impl JpegPhoto {
|
||||
}
|
||||
}
|
||||
|
||||
impl TryGetable for JpegPhoto {
|
||||
fn try_get_by<I: sea_orm::ColIdx>(res: &QueryResult, index: I) -> Result<Self, TryGetError> {
|
||||
<Self as std::convert::TryFrom<Vec<_>>>::try_from(Vec::<u8>::try_get_by(res, index)?)
|
||||
.map_err(|e| {
|
||||
TryGetError::DbErr(DbErr::TryIntoErr {
|
||||
from: "[u8]",
|
||||
into: "JpegPhoto",
|
||||
source: e.into(),
|
||||
})
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl ValueType for JpegPhoto {
|
||||
fn try_from(v: Value) -> Result<Self, ValueTypeErr> {
|
||||
<Self as std::convert::TryFrom<_>>::try_from(
|
||||
<Vec<u8> as sea_orm::sea_query::ValueType>::try_from(v)?.as_slice(),
|
||||
)
|
||||
.map_err(|_| ValueTypeErr {})
|
||||
}
|
||||
|
||||
fn type_name() -> String {
|
||||
"JpegPhoto".to_owned()
|
||||
}
|
||||
|
||||
fn array_type() -> ArrayType {
|
||||
ArrayType::Bytes
|
||||
}
|
||||
|
||||
fn column_type() -> ColumnType {
|
||||
ColumnType::Binary(sea_orm::sea_query::BlobSize::Long)
|
||||
}
|
||||
}
|
||||
|
||||
impl Nullable for JpegPhoto {
|
||||
fn null() -> Value {
|
||||
JpegPhoto::null().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl IntoActiveValue<JpegPhoto> for JpegPhoto {
|
||||
fn into_active_value(self) -> sea_orm::ActiveValue<JpegPhoto> {
|
||||
sea_orm::ActiveValue::Set(self)
|
||||
impl IntoActiveValue<Serialized> for JpegPhoto {
|
||||
fn into_active_value(self) -> sea_orm::ActiveValue<Serialized> {
|
||||
if self.is_empty() {
|
||||
sea_orm::ActiveValue::NotSet
|
||||
} else {
|
||||
sea_orm::ActiveValue::Set(Serialized::from(&self))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize, FromQueryResult)]
|
||||
#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize, Hash)]
|
||||
pub struct AttributeValue {
|
||||
pub name: String,
|
||||
pub value: Serialized,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Clone, Serialize, Deserialize)]
|
||||
pub struct User {
|
||||
pub user_id: UserId,
|
||||
pub email: String,
|
||||
pub display_name: Option<String>,
|
||||
pub first_name: Option<String>,
|
||||
pub last_name: Option<String>,
|
||||
pub avatar: Option<JpegPhoto>,
|
||||
pub creation_date: NaiveDateTime,
|
||||
pub uuid: Uuid,
|
||||
pub attributes: Vec<AttributeValue>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
@@ -332,54 +307,85 @@ impl Default for User {
|
||||
user_id: UserId::default(),
|
||||
email: String::new(),
|
||||
display_name: None,
|
||||
first_name: None,
|
||||
last_name: None,
|
||||
avatar: None,
|
||||
creation_date: epoch,
|
||||
uuid: Uuid::from_name_and_date("", &epoch),
|
||||
attributes: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
#[derive(
|
||||
Debug,
|
||||
Copy,
|
||||
Clone,
|
||||
PartialEq,
|
||||
Eq,
|
||||
PartialOrd,
|
||||
Ord,
|
||||
Hash,
|
||||
Serialize,
|
||||
Deserialize,
|
||||
DeriveValueType,
|
||||
)]
|
||||
pub struct GroupId(pub i32);
|
||||
|
||||
impl From<GroupId> for Value {
|
||||
fn from(group_id: GroupId) -> Self {
|
||||
group_id.0.into()
|
||||
}
|
||||
}
|
||||
|
||||
impl TryGetable for GroupId {
|
||||
fn try_get_by<I: sea_orm::ColIdx>(res: &QueryResult, index: I) -> Result<Self, TryGetError> {
|
||||
Ok(GroupId(i32::try_get_by(res, index)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl ValueType for GroupId {
|
||||
fn try_from(v: Value) -> Result<Self, ValueTypeErr> {
|
||||
Ok(GroupId(<i32 as ValueType>::try_from(v)?))
|
||||
}
|
||||
|
||||
fn type_name() -> String {
|
||||
"GroupId".to_owned()
|
||||
}
|
||||
|
||||
fn array_type() -> ArrayType {
|
||||
ArrayType::Int
|
||||
}
|
||||
|
||||
fn column_type() -> ColumnType {
|
||||
ColumnType::Integer
|
||||
}
|
||||
}
|
||||
|
||||
impl TryFromU64 for GroupId {
|
||||
fn try_from_u64(n: u64) -> Result<Self, DbErr> {
|
||||
Ok(GroupId(i32::try_from_u64(n)?))
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&GroupId> for Value {
|
||||
fn from(id: &GroupId) -> Self {
|
||||
(*id).into()
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Debug, Copy, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, EnumString, IntoStaticStr,
|
||||
)]
|
||||
pub enum AttributeType {
|
||||
String,
|
||||
Integer,
|
||||
JpegPhoto,
|
||||
DateTime,
|
||||
}
|
||||
|
||||
impl From<AttributeType> for Value {
|
||||
fn from(attribute_type: AttributeType) -> Self {
|
||||
Into::<&'static str>::into(attribute_type).into()
|
||||
}
|
||||
}
|
||||
|
||||
impl TryGetable for AttributeType {
|
||||
fn try_get_by<I: sea_orm::ColIdx>(res: &QueryResult, index: I) -> Result<Self, TryGetError> {
|
||||
use std::str::FromStr;
|
||||
Ok(AttributeType::from_str(&String::try_get_by(res, index)?).expect("Invalid enum value"))
|
||||
}
|
||||
}
|
||||
|
||||
impl ValueType for AttributeType {
|
||||
fn try_from(v: Value) -> Result<Self, ValueTypeErr> {
|
||||
use std::str::FromStr;
|
||||
Ok(
|
||||
AttributeType::from_str(&<String as ValueType>::try_from(v)?)
|
||||
.expect("Invalid enum value"),
|
||||
)
|
||||
}
|
||||
|
||||
fn type_name() -> String {
|
||||
"AttributeType".to_owned()
|
||||
}
|
||||
|
||||
fn array_type() -> ArrayType {
|
||||
ArrayType::String
|
||||
}
|
||||
|
||||
fn column_type() -> ColumnType {
|
||||
ColumnType::String(Some(64))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
|
||||
pub struct Group {
|
||||
pub id: GroupId,
|
||||
@@ -387,14 +393,16 @@ pub struct Group {
|
||||
pub creation_date: NaiveDateTime,
|
||||
pub uuid: Uuid,
|
||||
pub users: Vec<UserId>,
|
||||
pub attributes: Vec<AttributeValue>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize, FromQueryResult)]
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
|
||||
pub struct GroupDetails {
|
||||
pub group_id: GroupId,
|
||||
pub display_name: String,
|
||||
pub creation_date: NaiveDateTime,
|
||||
pub uuid: Uuid,
|
||||
pub attributes: Vec<AttributeValue>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, PartialEq, Eq)]
|
||||
@@ -402,3 +410,39 @@ pub struct UserAndGroups {
|
||||
pub user: User,
|
||||
pub groups: Option<Vec<GroupDetails>>,
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn test_serialized_debug_string() {
|
||||
assert_eq!(
|
||||
&format!("{:?}", Serialized::from("abcd")),
|
||||
"Serialized(\"abcd\")"
|
||||
);
|
||||
assert_eq!(
|
||||
&format!("{:?}", Serialized::from(&1234i64)),
|
||||
"Serialized(\"1234\")"
|
||||
);
|
||||
assert_eq!(
|
||||
&format!("{:?}", Serialized::from(&JpegPhoto::for_tests())),
|
||||
"Serialized(\"hash: 0xB947C77A16F3C3BD\")"
|
||||
);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_serialized_i64_len() {
|
||||
assert_eq!(SERIALIZED_I64_LEN, Serialized::from(&0i64).0.len());
|
||||
assert_eq!(
|
||||
SERIALIZED_I64_LEN,
|
||||
Serialized::from(&i64::max_value()).0.len()
|
||||
);
|
||||
assert_eq!(
|
||||
SERIALIZED_I64_LEN,
|
||||
Serialized::from(&i64::min_value()).0.len()
|
||||
);
|
||||
assert_eq!(SERIALIZED_I64_LEN, Serialized::from(&-1000i64).0.len());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -6,8 +6,10 @@ use tracing::info;
|
||||
use crate::domain::{
|
||||
error::Result,
|
||||
handler::{
|
||||
BackendHandler, CreateUserRequest, GroupListerBackendHandler, GroupRequestFilter,
|
||||
UpdateGroupRequest, UpdateUserRequest, UserListerBackendHandler, UserRequestFilter,
|
||||
AttributeSchema, BackendHandler, CreateUserRequest, GroupBackendHandler,
|
||||
GroupListerBackendHandler, GroupRequestFilter, Schema, SchemaBackendHandler,
|
||||
UpdateGroupRequest, UpdateUserRequest, UserBackendHandler, UserListerBackendHandler,
|
||||
UserRequestFilter,
|
||||
},
|
||||
types::{Group, GroupDetails, GroupId, User, UserAndGroups, UserId},
|
||||
};
|
||||
@@ -106,10 +108,10 @@ pub trait AdminBackendHandler:
|
||||
#[async_trait]
|
||||
impl<Handler: BackendHandler> UserReadableBackendHandler for Handler {
|
||||
async fn get_user_details(&self, user_id: &UserId) -> Result<User> {
|
||||
self.get_user_details(user_id).await
|
||||
<Handler as UserBackendHandler>::get_user_details(self, user_id).await
|
||||
}
|
||||
async fn get_user_groups(&self, user_id: &UserId) -> Result<HashSet<GroupDetails>> {
|
||||
self.get_user_groups(user_id).await
|
||||
<Handler as UserBackendHandler>::get_user_groups(self, user_id).await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -120,44 +122,44 @@ impl<Handler: BackendHandler> ReadonlyBackendHandler for Handler {
|
||||
filters: Option<UserRequestFilter>,
|
||||
get_groups: bool,
|
||||
) -> Result<Vec<UserAndGroups>> {
|
||||
self.list_users(filters, get_groups).await
|
||||
<Handler as UserListerBackendHandler>::list_users(self, filters, get_groups).await
|
||||
}
|
||||
async fn list_groups(&self, filters: Option<GroupRequestFilter>) -> Result<Vec<Group>> {
|
||||
self.list_groups(filters).await
|
||||
<Handler as GroupListerBackendHandler>::list_groups(self, filters).await
|
||||
}
|
||||
async fn get_group_details(&self, group_id: GroupId) -> Result<GroupDetails> {
|
||||
self.get_group_details(group_id).await
|
||||
<Handler as GroupBackendHandler>::get_group_details(self, group_id).await
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl<Handler: BackendHandler> UserWriteableBackendHandler for Handler {
|
||||
async fn update_user(&self, request: UpdateUserRequest) -> Result<()> {
|
||||
self.update_user(request).await
|
||||
<Handler as UserBackendHandler>::update_user(self, request).await
|
||||
}
|
||||
}
|
||||
#[async_trait]
|
||||
impl<Handler: BackendHandler> AdminBackendHandler for Handler {
|
||||
async fn create_user(&self, request: CreateUserRequest) -> Result<()> {
|
||||
self.create_user(request).await
|
||||
<Handler as UserBackendHandler>::create_user(self, request).await
|
||||
}
|
||||
async fn delete_user(&self, user_id: &UserId) -> Result<()> {
|
||||
self.delete_user(user_id).await
|
||||
<Handler as UserBackendHandler>::delete_user(self, user_id).await
|
||||
}
|
||||
async fn add_user_to_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()> {
|
||||
self.add_user_to_group(user_id, group_id).await
|
||||
<Handler as UserBackendHandler>::add_user_to_group(self, user_id, group_id).await
|
||||
}
|
||||
async fn remove_user_from_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()> {
|
||||
self.remove_user_from_group(user_id, group_id).await
|
||||
<Handler as UserBackendHandler>::remove_user_from_group(self, user_id, group_id).await
|
||||
}
|
||||
async fn update_group(&self, request: UpdateGroupRequest) -> Result<()> {
|
||||
self.update_group(request).await
|
||||
<Handler as GroupBackendHandler>::update_group(self, request).await
|
||||
}
|
||||
async fn create_group(&self, group_name: &str) -> Result<GroupId> {
|
||||
self.create_group(group_name).await
|
||||
<Handler as GroupBackendHandler>::create_group(self, group_name).await
|
||||
}
|
||||
async fn delete_group(&self, group_id: GroupId) -> Result<()> {
|
||||
self.delete_group(group_id).await
|
||||
<Handler as GroupBackendHandler>::delete_group(self, group_id).await
|
||||
}
|
||||
}
|
||||
|
||||
@@ -262,6 +264,23 @@ pub struct UserRestrictedListerBackendHandler<'a, Handler> {
|
||||
pub user_filter: Option<UserId>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl<'a, Handler: SchemaBackendHandler + Sync> SchemaBackendHandler
|
||||
for UserRestrictedListerBackendHandler<'a, Handler>
|
||||
{
|
||||
async fn get_schema(&self) -> Result<Schema> {
|
||||
let mut schema = self.handler.get_schema().await?;
|
||||
if self.user_filter.is_some() {
|
||||
let filter_attributes = |attributes: &mut Vec<AttributeSchema>| {
|
||||
attributes.retain(|a| a.is_visible);
|
||||
};
|
||||
filter_attributes(&mut schema.user_attributes.attributes);
|
||||
filter_attributes(&mut schema.group_attributes.attributes);
|
||||
}
|
||||
Ok(schema)
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl<'a, Handler: UserListerBackendHandler + Sync> UserListerBackendHandler
|
||||
for UserRestrictedListerBackendHandler<'a, Handler>
|
||||
|
||||
@@ -423,7 +423,7 @@ where
|
||||
.unwrap_or_else(error_to_http_response)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug")]
|
||||
#[instrument(skip_all, level = "debug", fields(name = %request.name))]
|
||||
async fn post_authorize<Backend>(
|
||||
data: web::Data<AppState<Backend>>,
|
||||
request: web::Json<BindRequest>,
|
||||
@@ -432,7 +432,6 @@ where
|
||||
Backend: TcpBackendHandler + BackendHandler + LoginHandler + 'static,
|
||||
{
|
||||
let name = request.name.clone();
|
||||
debug!(%name);
|
||||
data.get_login_handler().bind(request.into_inner()).await?;
|
||||
get_login_successful_response(&data, &name).await
|
||||
}
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use clap::{builder::EnumValueParser, Parser};
|
||||
use lettre::message::Mailbox;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
/// lldap is a lightweight LDAP server
|
||||
#[derive(Debug, Parser, Clone)]
|
||||
@@ -82,7 +83,7 @@ pub struct RunOpts {
|
||||
|
||||
/// URL of the server, for password reset links.
|
||||
#[clap(long, env = "LLDAP_HTTP_URL")]
|
||||
pub http_url: Option<String>,
|
||||
pub http_url: Option<Url>,
|
||||
|
||||
/// Database connection URL
|
||||
#[clap(short, long, env = "LLDAP_DATABASE_URL")]
|
||||
@@ -130,6 +131,7 @@ pub struct LdapsOpts {
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize, clap::ValueEnum)]
|
||||
#[serde(rename_all = "UPPERCASE")]
|
||||
#[clap(rename_all = "UPPERCASE")]
|
||||
pub enum SmtpEncryption {
|
||||
None,
|
||||
Tls,
|
||||
@@ -171,7 +173,7 @@ pub struct SmtpOpts {
|
||||
#[clap(long, env = "LLDAP_SMTP_OPTIONS__TLS_REQUIRED", hide = true)]
|
||||
pub smtp_tls_required: Option<bool>,
|
||||
|
||||
#[clap(long, env = "LLDAP_SMTP_OPTIONS__ENCRYPTION", value_parser = EnumValueParser::<SmtpEncryption>::new(), ignore_case = true)]
|
||||
#[clap(long, env = "LLDAP_SMTP_OPTIONS__SMTP_ENCRYPTION", value_parser = EnumValueParser::<SmtpEncryption>::new(), ignore_case = true)]
|
||||
pub smtp_encryption: Option<SmtpEncryption>,
|
||||
}
|
||||
|
||||
|
||||
@@ -11,6 +11,7 @@ use lettre::message::Mailbox;
|
||||
use lldap_auth::opaque::{server::ServerSetup, KeyPair};
|
||||
use secstr::SecUtf8;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use url::Url;
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize, derive_builder::Builder)]
|
||||
#[builder(pattern = "owned")]
|
||||
@@ -100,8 +101,8 @@ pub struct Configuration {
|
||||
pub smtp_options: MailOptions,
|
||||
#[builder(default)]
|
||||
pub ldaps_options: LdapsOptions,
|
||||
#[builder(default = r#"String::from("http://localhost")"#)]
|
||||
pub http_url: String,
|
||||
#[builder(default = r#"Url::parse("http://localhost").unwrap()"#)]
|
||||
pub http_url: Url,
|
||||
#[serde(skip)]
|
||||
#[builder(field(private), default = "None")]
|
||||
server_setup: Option<ServerSetup>,
|
||||
@@ -237,7 +238,7 @@ impl ConfigOverrider for RunOpts {
|
||||
}
|
||||
|
||||
if let Some(url) = self.http_url.as_ref() {
|
||||
config.http_url = url.to_string();
|
||||
config.http_url = url.clone();
|
||||
}
|
||||
|
||||
if let Some(database_url) = self.database_url.as_ref() {
|
||||
@@ -359,6 +360,7 @@ where
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use pretty_assertions::assert_eq;
|
||||
|
||||
#[test]
|
||||
fn check_generated_server_key() {
|
||||
|
||||
@@ -48,7 +48,6 @@ impl Scheduler {
|
||||
|
||||
#[instrument(skip_all)]
|
||||
async fn cleanup_db(sql_pool: DbConnection) {
|
||||
info!("Cleaning DB");
|
||||
if let Err(e) = model::JwtRefreshStorage::delete_many()
|
||||
.filter(JwtRefreshStorageColumn::ExpiryDate.lt(chrono::Utc::now().naive_utc()))
|
||||
.exec(&sql_pool)
|
||||
@@ -70,7 +69,6 @@ impl Scheduler {
|
||||
{
|
||||
error!("DB error while cleaning up password reset tokens: {}", e);
|
||||
};
|
||||
info!("DB cleaned!");
|
||||
}
|
||||
|
||||
fn duration_until_next(&self) -> Duration {
|
||||
|
||||
@@ -1,16 +1,17 @@
|
||||
use crate::{
|
||||
domain::{
|
||||
handler::BackendHandler,
|
||||
ldap::utils::map_user_field,
|
||||
types::{GroupDetails, GroupId, UserColumn, UserId},
|
||||
handler::{BackendHandler, SchemaBackendHandler},
|
||||
ldap::utils::{map_user_field, UserFieldType},
|
||||
types::{AttributeType, GroupDetails, GroupId, JpegPhoto, UserColumn, UserId},
|
||||
},
|
||||
infra::{
|
||||
access_control::{ReadonlyBackendHandler, UserReadableBackendHandler},
|
||||
graphql::api::field_error_callback,
|
||||
graphql::api::{field_error_callback, Context},
|
||||
schema::PublicSchema,
|
||||
},
|
||||
};
|
||||
use chrono::TimeZone;
|
||||
use juniper::{graphql_object, FieldResult, GraphQLInputObject};
|
||||
use chrono::{NaiveDateTime, TimeZone};
|
||||
use juniper::{graphql_object, FieldError, FieldResult, GraphQLInputObject};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::{debug, debug_span, Instrument};
|
||||
|
||||
@@ -18,7 +19,10 @@ type DomainRequestFilter = crate::domain::handler::UserRequestFilter;
|
||||
type DomainUser = crate::domain::types::User;
|
||||
type DomainGroup = crate::domain::types::Group;
|
||||
type DomainUserAndGroups = crate::domain::types::UserAndGroups;
|
||||
use super::api::Context;
|
||||
type DomainSchema = crate::infra::schema::PublicSchema;
|
||||
type DomainAttributeList = crate::domain::handler::AttributeList;
|
||||
type DomainAttributeSchema = crate::domain::handler::AttributeSchema;
|
||||
type DomainAttributeValue = crate::domain::types::AttributeValue;
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, GraphQLInputObject)]
|
||||
/// A filter for requests, specifying a boolean expression based on field constraints. Only one of
|
||||
@@ -61,14 +65,19 @@ impl TryInto<DomainRequestFilter> for RequestFilter {
|
||||
return Err("Multiple fields specified in request filter".to_string());
|
||||
}
|
||||
if let Some(e) = self.eq {
|
||||
if let Some(column) = map_user_field(&e.field) {
|
||||
if column == UserColumn::UserId {
|
||||
return Ok(DomainRequestFilter::UserId(UserId::new(&e.value)));
|
||||
return match map_user_field(&e.field.to_ascii_lowercase()) {
|
||||
UserFieldType::NoMatch => Err(format!("Unknown request filter: {}", &e.field)),
|
||||
UserFieldType::PrimaryField(UserColumn::UserId) => {
|
||||
Ok(DomainRequestFilter::UserId(UserId::new(&e.value)))
|
||||
}
|
||||
return Ok(DomainRequestFilter::Equality(column, e.value));
|
||||
} else {
|
||||
return Err(format!("Unknown request filter: {}", &e.field));
|
||||
}
|
||||
UserFieldType::PrimaryField(column) => {
|
||||
Ok(DomainRequestFilter::Equality(column, e.value))
|
||||
}
|
||||
UserFieldType::Attribute(column) => Ok(DomainRequestFilter::AttributeEquality(
|
||||
column.to_owned(),
|
||||
e.value,
|
||||
)),
|
||||
};
|
||||
}
|
||||
if let Some(c) = self.any {
|
||||
return Ok(DomainRequestFilter::Or(
|
||||
@@ -197,6 +206,19 @@ impl<Handler: BackendHandler> Query<Handler> {
|
||||
.await
|
||||
.map(Into::into)?)
|
||||
}
|
||||
|
||||
async fn schema(context: &Context<Handler>) -> FieldResult<Schema<Handler>> {
|
||||
let span = debug_span!("[GraphQL query] get_schema");
|
||||
let handler = context
|
||||
.handler
|
||||
.get_user_restricted_lister_handler(&context.validation_result);
|
||||
Ok(handler
|
||||
.get_schema()
|
||||
.instrument(span)
|
||||
.await
|
||||
.map(Into::<PublicSchema>::into)
|
||||
.map(Into::into)?)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
|
||||
@@ -231,15 +253,29 @@ impl<Handler: BackendHandler> User<Handler> {
|
||||
}
|
||||
|
||||
fn first_name(&self) -> &str {
|
||||
self.user.first_name.as_deref().unwrap_or("")
|
||||
self.user
|
||||
.attributes
|
||||
.iter()
|
||||
.find(|a| a.name == "first_name")
|
||||
.map(|a| a.value.unwrap())
|
||||
.unwrap_or("")
|
||||
}
|
||||
|
||||
fn last_name(&self) -> &str {
|
||||
self.user.last_name.as_deref().unwrap_or("")
|
||||
self.user
|
||||
.attributes
|
||||
.iter()
|
||||
.find(|a| a.name == "last_name")
|
||||
.map(|a| a.value.unwrap())
|
||||
.unwrap_or("")
|
||||
}
|
||||
|
||||
fn avatar(&self) -> Option<String> {
|
||||
self.user.avatar.as_ref().map(String::from)
|
||||
self.user
|
||||
.attributes
|
||||
.iter()
|
||||
.find(|a| a.name == "avatar")
|
||||
.map(|a| String::from(&a.value.unwrap::<JpegPhoto>()))
|
||||
}
|
||||
|
||||
fn creation_date(&self) -> chrono::DateTime<chrono::Utc> {
|
||||
@@ -250,6 +286,16 @@ impl<Handler: BackendHandler> User<Handler> {
|
||||
self.user.uuid.as_str()
|
||||
}
|
||||
|
||||
/// User-defined attributes.
|
||||
fn attributes(&self) -> Vec<AttributeValue<Handler, SchemaUserAttributeExtractor>> {
|
||||
self.user
|
||||
.attributes
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// The groups to which this user belongs.
|
||||
async fn groups(&self, context: &Context<Handler>) -> FieldResult<Vec<Group<Handler>>> {
|
||||
let span = debug_span!("[GraphQL query] user::groups");
|
||||
@@ -263,7 +309,14 @@ impl<Handler: BackendHandler> User<Handler> {
|
||||
.get_user_groups(&self.user.user_id)
|
||||
.instrument(span)
|
||||
.await
|
||||
.map(|set| set.into_iter().map(Into::into).collect())?)
|
||||
.map(|set| {
|
||||
let mut groups = set
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect::<Vec<Group<Handler>>>();
|
||||
groups.sort_by(|g1, g2| g1.display_name.cmp(&g2.display_name));
|
||||
groups
|
||||
})?)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -292,6 +345,7 @@ pub struct Group<Handler: BackendHandler> {
|
||||
display_name: String,
|
||||
creation_date: chrono::NaiveDateTime,
|
||||
uuid: String,
|
||||
attributes: Vec<DomainAttributeValue>,
|
||||
members: Option<Vec<String>>,
|
||||
_phantom: std::marker::PhantomData<Box<Handler>>,
|
||||
}
|
||||
@@ -310,6 +364,16 @@ impl<Handler: BackendHandler> Group<Handler> {
|
||||
fn uuid(&self) -> String {
|
||||
self.uuid.clone()
|
||||
}
|
||||
|
||||
/// User-defined attributes.
|
||||
fn attributes(&self) -> Vec<AttributeValue<Handler, SchemaGroupAttributeExtractor>> {
|
||||
self.attributes
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect()
|
||||
}
|
||||
|
||||
/// The groups to which this user belongs.
|
||||
async fn users(&self, context: &Context<Handler>) -> FieldResult<Vec<User<Handler>>> {
|
||||
let span = debug_span!("[GraphQL query] group::users");
|
||||
@@ -340,6 +404,7 @@ impl<Handler: BackendHandler> From<GroupDetails> for Group<Handler> {
|
||||
display_name: group_details.display_name,
|
||||
creation_date: group_details.creation_date,
|
||||
uuid: group_details.uuid.into_string(),
|
||||
attributes: group_details.attributes,
|
||||
members: None,
|
||||
_phantom: std::marker::PhantomData,
|
||||
}
|
||||
@@ -353,17 +418,222 @@ impl<Handler: BackendHandler> From<DomainGroup> for Group<Handler> {
|
||||
display_name: group.display_name,
|
||||
creation_date: group.creation_date,
|
||||
uuid: group.uuid.into_string(),
|
||||
attributes: group.attributes,
|
||||
members: Some(group.users.into_iter().map(UserId::into_string).collect()),
|
||||
_phantom: std::marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
|
||||
pub struct AttributeSchema<Handler: BackendHandler> {
|
||||
schema: DomainAttributeSchema,
|
||||
_phantom: std::marker::PhantomData<Box<Handler>>,
|
||||
}
|
||||
|
||||
#[graphql_object(context = Context<Handler>)]
|
||||
impl<Handler: BackendHandler> AttributeSchema<Handler> {
|
||||
fn name(&self) -> String {
|
||||
self.schema.name.clone()
|
||||
}
|
||||
fn attribute_type(&self) -> String {
|
||||
let name: &'static str = self.schema.attribute_type.into();
|
||||
name.to_owned()
|
||||
}
|
||||
fn is_list(&self) -> bool {
|
||||
self.schema.is_list
|
||||
}
|
||||
fn is_visible(&self) -> bool {
|
||||
self.schema.is_visible
|
||||
}
|
||||
fn is_editable(&self) -> bool {
|
||||
self.schema.is_editable
|
||||
}
|
||||
fn is_hardcoded(&self) -> bool {
|
||||
self.schema.is_hardcoded
|
||||
}
|
||||
}
|
||||
|
||||
impl<Handler: BackendHandler> From<DomainAttributeSchema> for AttributeSchema<Handler> {
|
||||
fn from(value: DomainAttributeSchema) -> Self {
|
||||
Self {
|
||||
schema: value,
|
||||
_phantom: std::marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
|
||||
pub struct AttributeList<Handler: BackendHandler> {
|
||||
schema: DomainAttributeList,
|
||||
_phantom: std::marker::PhantomData<Box<Handler>>,
|
||||
}
|
||||
|
||||
#[graphql_object(context = Context<Handler>)]
|
||||
impl<Handler: BackendHandler> AttributeList<Handler> {
|
||||
fn attributes(&self) -> Vec<AttributeSchema<Handler>> {
|
||||
self.schema
|
||||
.attributes
|
||||
.clone()
|
||||
.into_iter()
|
||||
.map(Into::into)
|
||||
.collect()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Handler: BackendHandler> From<DomainAttributeList> for AttributeList<Handler> {
|
||||
fn from(value: DomainAttributeList) -> Self {
|
||||
Self {
|
||||
schema: value,
|
||||
_phantom: std::marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
|
||||
pub struct Schema<Handler: BackendHandler> {
|
||||
schema: DomainSchema,
|
||||
_phantom: std::marker::PhantomData<Box<Handler>>,
|
||||
}
|
||||
|
||||
#[graphql_object(context = Context<Handler>)]
|
||||
impl<Handler: BackendHandler> Schema<Handler> {
|
||||
fn user_schema(&self) -> AttributeList<Handler> {
|
||||
self.schema.get_schema().user_attributes.clone().into()
|
||||
}
|
||||
fn group_schema(&self) -> AttributeList<Handler> {
|
||||
self.schema.get_schema().group_attributes.clone().into()
|
||||
}
|
||||
}
|
||||
|
||||
impl<Handler: BackendHandler> From<DomainSchema> for Schema<Handler> {
|
||||
fn from(value: DomainSchema) -> Self {
|
||||
Self {
|
||||
schema: value,
|
||||
_phantom: std::marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
trait SchemaAttributeExtractor: std::marker::Send {
|
||||
fn get_attributes(schema: &DomainSchema) -> &DomainAttributeList;
|
||||
}
|
||||
|
||||
struct SchemaUserAttributeExtractor;
|
||||
|
||||
impl SchemaAttributeExtractor for SchemaUserAttributeExtractor {
|
||||
fn get_attributes(schema: &DomainSchema) -> &DomainAttributeList {
|
||||
&schema.get_schema().user_attributes
|
||||
}
|
||||
}
|
||||
|
||||
struct SchemaGroupAttributeExtractor;
|
||||
|
||||
impl SchemaAttributeExtractor for SchemaGroupAttributeExtractor {
|
||||
fn get_attributes(schema: &DomainSchema) -> &DomainAttributeList {
|
||||
&schema.get_schema().group_attributes
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
|
||||
pub struct AttributeValue<Handler: BackendHandler, Extractor> {
|
||||
attribute: DomainAttributeValue,
|
||||
_phantom: std::marker::PhantomData<Box<Handler>>,
|
||||
_phantom_extractor: std::marker::PhantomData<Extractor>,
|
||||
}
|
||||
|
||||
#[graphql_object(context = Context<Handler>)]
|
||||
impl<Handler: BackendHandler, Extractor: SchemaAttributeExtractor>
|
||||
AttributeValue<Handler, Extractor>
|
||||
{
|
||||
fn name(&self) -> &str {
|
||||
&self.attribute.name
|
||||
}
|
||||
async fn value(&self, context: &Context<Handler>) -> FieldResult<Vec<String>> {
|
||||
let handler = context
|
||||
.handler
|
||||
.get_user_restricted_lister_handler(&context.validation_result);
|
||||
serialize_attribute(
|
||||
&self.attribute,
|
||||
Extractor::get_attributes(&PublicSchema::from(handler.get_schema().await?)),
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn serialize_attribute(
|
||||
attribute: &DomainAttributeValue,
|
||||
attributes: &DomainAttributeList,
|
||||
) -> FieldResult<Vec<String>> {
|
||||
let convert_date = |date| chrono::Utc.from_utc_datetime(&date).to_rfc3339();
|
||||
attributes
|
||||
.get_attribute_type(&attribute.name)
|
||||
.map(|attribute_type| {
|
||||
match attribute_type {
|
||||
(AttributeType::String, false) => {
|
||||
vec![attribute.value.unwrap::<String>()]
|
||||
}
|
||||
(AttributeType::Integer, false) => {
|
||||
// LDAP integers are encoded as strings.
|
||||
vec![attribute.value.unwrap::<i64>().to_string()]
|
||||
}
|
||||
(AttributeType::JpegPhoto, false) => {
|
||||
vec![String::from(&attribute.value.unwrap::<JpegPhoto>())]
|
||||
}
|
||||
(AttributeType::DateTime, false) => {
|
||||
vec![convert_date(attribute.value.unwrap::<NaiveDateTime>())]
|
||||
}
|
||||
(AttributeType::String, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<String>>()
|
||||
.into_iter()
|
||||
.collect(),
|
||||
(AttributeType::Integer, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<i64>>()
|
||||
.into_iter()
|
||||
.map(|i| i.to_string())
|
||||
.collect(),
|
||||
(AttributeType::JpegPhoto, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<JpegPhoto>>()
|
||||
.iter()
|
||||
.map(String::from)
|
||||
.collect(),
|
||||
(AttributeType::DateTime, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<NaiveDateTime>>()
|
||||
.into_iter()
|
||||
.map(convert_date)
|
||||
.collect(),
|
||||
}
|
||||
})
|
||||
.ok_or_else(|| FieldError::from(anyhow::anyhow!("Unknown attribute: {}", &attribute.name)))
|
||||
}
|
||||
|
||||
impl<Handler: BackendHandler, Extractor> From<DomainAttributeValue>
|
||||
for AttributeValue<Handler, Extractor>
|
||||
{
|
||||
fn from(value: DomainAttributeValue) -> Self {
|
||||
Self {
|
||||
attribute: value,
|
||||
_phantom: std::marker::PhantomData,
|
||||
_phantom_extractor: std::marker::PhantomData,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
domain::handler::MockTestBackendHandler, infra::access_control::ValidationResults,
|
||||
domain::{
|
||||
handler::AttributeList,
|
||||
types::{AttributeType, Serialized},
|
||||
},
|
||||
infra::{
|
||||
access_control::{Permission, ValidationResults},
|
||||
test_utils::{setup_default_schema, MockTestBackendHandler},
|
||||
},
|
||||
};
|
||||
use chrono::TimeZone;
|
||||
use juniper::{
|
||||
@@ -371,6 +641,7 @@ mod tests {
|
||||
RootNode, Variables,
|
||||
};
|
||||
use mockall::predicate::eq;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::collections::HashSet;
|
||||
|
||||
fn schema<'q, C, Q>(query_root: Q) -> RootNode<'q, Q, EmptyMutation<C>, EmptySubscription<C>>
|
||||
@@ -392,15 +663,58 @@ mod tests {
|
||||
email
|
||||
creationDate
|
||||
uuid
|
||||
attributes {
|
||||
name
|
||||
value
|
||||
}
|
||||
groups {
|
||||
id
|
||||
displayName
|
||||
creationDate
|
||||
uuid
|
||||
attributes {
|
||||
name
|
||||
value
|
||||
}
|
||||
}
|
||||
}
|
||||
}"#;
|
||||
|
||||
let mut mock = MockTestBackendHandler::new();
|
||||
mock.expect_get_schema().returning(|| {
|
||||
Ok(crate::domain::handler::Schema {
|
||||
user_attributes: DomainAttributeList {
|
||||
attributes: vec![
|
||||
DomainAttributeSchema {
|
||||
name: "first_name".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
DomainAttributeSchema {
|
||||
name: "last_name".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
group_attributes: DomainAttributeList {
|
||||
attributes: vec![DomainAttributeSchema {
|
||||
name: "club_name".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: false,
|
||||
}],
|
||||
},
|
||||
})
|
||||
});
|
||||
mock.expect_get_user_details()
|
||||
.with(eq(UserId::new("bob")))
|
||||
.return_once(|_| {
|
||||
@@ -409,6 +723,16 @@ mod tests {
|
||||
email: "bob@bobbers.on".to_string(),
|
||||
creation_date: chrono::Utc.timestamp_millis_opt(42).unwrap().naive_utc(),
|
||||
uuid: crate::uuid!("b1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
|
||||
attributes: vec![
|
||||
DomainAttributeValue {
|
||||
name: "first_name".to_owned(),
|
||||
value: Serialized::from("Bob"),
|
||||
},
|
||||
DomainAttributeValue {
|
||||
name: "last_name".to_owned(),
|
||||
value: Serialized::from("Bobberson"),
|
||||
},
|
||||
],
|
||||
..Default::default()
|
||||
})
|
||||
});
|
||||
@@ -418,6 +742,17 @@ mod tests {
|
||||
display_name: "Bobbersons".to_string(),
|
||||
creation_date: chrono::Utc.timestamp_nanos(42).naive_utc(),
|
||||
uuid: crate::uuid!("a1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
|
||||
attributes: vec![DomainAttributeValue {
|
||||
name: "club_name".to_owned(),
|
||||
value: Serialized::from("Gang of Four"),
|
||||
}],
|
||||
});
|
||||
groups.insert(GroupDetails {
|
||||
group_id: GroupId(7),
|
||||
display_name: "Jefferees".to_string(),
|
||||
creation_date: chrono::Utc.timestamp_nanos(12).naive_utc(),
|
||||
uuid: crate::uuid!("b1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
|
||||
attributes: Vec::new(),
|
||||
});
|
||||
mock.expect_get_user_groups()
|
||||
.with(eq(UserId::new("bob")))
|
||||
@@ -437,10 +772,31 @@ mod tests {
|
||||
"email": "bob@bobbers.on",
|
||||
"creationDate": "1970-01-01T00:00:00.042+00:00",
|
||||
"uuid": "b1a2a3a4-b1b2-c1c2-d1d2-d3d4d5d6d7d8",
|
||||
"attributes": [{
|
||||
"name": "first_name",
|
||||
"value": ["Bob"],
|
||||
},
|
||||
{
|
||||
"name": "last_name",
|
||||
"value": ["Bobberson"],
|
||||
}],
|
||||
"groups": [{
|
||||
"id": 3,
|
||||
"displayName": "Bobbersons",
|
||||
"creationDate": "1970-01-01T00:00:00.000000042+00:00",
|
||||
"uuid": "a1a2a3a4-b1b2-c1c2-d1d2-d3d4d5d6d7d8"
|
||||
"uuid": "a1a2a3a4-b1b2-c1c2-d1d2-d3d4d5d6d7d8",
|
||||
"attributes": [{
|
||||
"name": "club_name",
|
||||
"value": ["Gang of Four"],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"id": 7,
|
||||
"displayName": "Jefferees",
|
||||
"creationDate": "1970-01-01T00:00:00.000000012+00:00",
|
||||
"uuid": "b1a2a3a4-b1b2-c1c2-d1d2-d3d4d5d6d7d8",
|
||||
"attributes": [],
|
||||
}]
|
||||
}
|
||||
}),
|
||||
@@ -461,6 +817,10 @@ mod tests {
|
||||
{eq: {
|
||||
field: "email"
|
||||
value: "robert@bobbers.on"
|
||||
}},
|
||||
{eq: {
|
||||
field: "firstName"
|
||||
value: "robert"
|
||||
}}
|
||||
]}) {
|
||||
id
|
||||
@@ -475,7 +835,11 @@ mod tests {
|
||||
DomainRequestFilter::UserId(UserId::new("bob")),
|
||||
DomainRequestFilter::Equality(
|
||||
UserColumn::Email,
|
||||
"robert@bobbers.on".to_string(),
|
||||
"robert@bobbers.on".to_owned(),
|
||||
),
|
||||
DomainRequestFilter::AttributeEquality(
|
||||
"first_name".to_owned(),
|
||||
"robert".to_owned(),
|
||||
),
|
||||
]))),
|
||||
eq(false),
|
||||
@@ -485,7 +849,7 @@ mod tests {
|
||||
DomainUserAndGroups {
|
||||
user: DomainUser {
|
||||
user_id: UserId::new("bob"),
|
||||
email: "bob@bobbers.on".to_string(),
|
||||
email: "bob@bobbers.on".to_owned(),
|
||||
..Default::default()
|
||||
},
|
||||
groups: None,
|
||||
@@ -493,7 +857,7 @@ mod tests {
|
||||
DomainUserAndGroups {
|
||||
user: DomainUser {
|
||||
user_id: UserId::new("robert"),
|
||||
email: "robert@bobbers.on".to_string(),
|
||||
email: "robert@bobbers.on".to_owned(),
|
||||
..Default::default()
|
||||
},
|
||||
groups: None,
|
||||
@@ -525,4 +889,219 @@ mod tests {
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn get_schema() {
|
||||
const QUERY: &str = r#"{
|
||||
schema {
|
||||
userSchema {
|
||||
attributes {
|
||||
name
|
||||
attributeType
|
||||
isList
|
||||
isVisible
|
||||
isEditable
|
||||
isHardcoded
|
||||
}
|
||||
}
|
||||
groupSchema {
|
||||
attributes {
|
||||
name
|
||||
attributeType
|
||||
isList
|
||||
isVisible
|
||||
isEditable
|
||||
isHardcoded
|
||||
}
|
||||
}
|
||||
}
|
||||
}"#;
|
||||
|
||||
let mut mock = MockTestBackendHandler::new();
|
||||
|
||||
setup_default_schema(&mut mock);
|
||||
|
||||
let context =
|
||||
Context::<MockTestBackendHandler>::new_for_tests(mock, ValidationResults::admin());
|
||||
|
||||
let schema = schema(Query::<MockTestBackendHandler>::new());
|
||||
assert_eq!(
|
||||
execute(QUERY, None, &schema, &Variables::new(), &context).await,
|
||||
Ok((
|
||||
graphql_value!(
|
||||
{
|
||||
"schema": {
|
||||
"userSchema": {
|
||||
"attributes": [
|
||||
{
|
||||
"name": "avatar",
|
||||
"attributeType": "JpegPhoto",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": true,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
{
|
||||
"name": "creation_date",
|
||||
"attributeType": "DateTime",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": false,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
{
|
||||
"name": "display_name",
|
||||
"attributeType": "String",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": true,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
{
|
||||
"name": "first_name",
|
||||
"attributeType": "String",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": true,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
{
|
||||
"name": "last_name",
|
||||
"attributeType": "String",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": true,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
{
|
||||
"name": "mail",
|
||||
"attributeType": "String",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": true,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
{
|
||||
"name": "user_id",
|
||||
"attributeType": "String",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": false,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
{
|
||||
"name": "uuid",
|
||||
"attributeType": "String",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": false,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
]
|
||||
},
|
||||
"groupSchema": {
|
||||
"attributes": [
|
||||
{
|
||||
"name": "creation_date",
|
||||
"attributeType": "DateTime",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": false,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
{
|
||||
"name": "display_name",
|
||||
"attributeType": "String",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": true,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
{
|
||||
"name": "group_id",
|
||||
"attributeType": "Integer",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": false,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
{
|
||||
"name": "uuid",
|
||||
"attributeType": "String",
|
||||
"isList": false,
|
||||
"isVisible": true,
|
||||
"isEditable": false,
|
||||
"isHardcoded": true,
|
||||
},
|
||||
]
|
||||
}
|
||||
}
|
||||
}),
|
||||
vec![]
|
||||
))
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn regular_user_doesnt_see_non_visible_attributes() {
|
||||
const QUERY: &str = r#"{
|
||||
schema {
|
||||
userSchema {
|
||||
attributes {
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}"#;
|
||||
|
||||
let mut mock = MockTestBackendHandler::new();
|
||||
|
||||
mock.expect_get_schema().times(1).return_once(|| {
|
||||
Ok(crate::domain::handler::Schema {
|
||||
user_attributes: AttributeList {
|
||||
attributes: vec![crate::domain::handler::AttributeSchema {
|
||||
name: "invisible".to_owned(),
|
||||
attribute_type: AttributeType::JpegPhoto,
|
||||
is_list: false,
|
||||
is_visible: false,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
}],
|
||||
},
|
||||
group_attributes: AttributeList {
|
||||
attributes: Vec::new(),
|
||||
},
|
||||
})
|
||||
});
|
||||
|
||||
let context = Context::<MockTestBackendHandler>::new_for_tests(
|
||||
mock,
|
||||
ValidationResults {
|
||||
user: UserId::new("bob"),
|
||||
permission: Permission::Regular,
|
||||
},
|
||||
);
|
||||
|
||||
let schema = schema(Query::<MockTestBackendHandler>::new());
|
||||
assert_eq!(
|
||||
execute(QUERY, None, &schema, &Variables::new(), &context).await,
|
||||
Ok((
|
||||
graphql_value!(
|
||||
{
|
||||
"schema": {
|
||||
"userSchema": {
|
||||
"attributes": [
|
||||
{"name": "creation_date"},
|
||||
{"name": "display_name"},
|
||||
{"name": "mail"},
|
||||
{"name": "user_id"},
|
||||
{"name": "uuid"},
|
||||
]
|
||||
}
|
||||
}
|
||||
} ),
|
||||
vec![]
|
||||
))
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -19,8 +19,8 @@ where
|
||||
{
|
||||
use tokio_stream::StreamExt;
|
||||
let (r, w) = tokio::io::split(stream);
|
||||
let mut requests = FramedRead::new(r, LdapCodec);
|
||||
let mut resp = FramedWrite::new(w, LdapCodec);
|
||||
let mut requests = FramedRead::new(r, LdapCodec::default());
|
||||
let mut resp = FramedWrite::new(w, LdapCodec::default());
|
||||
|
||||
resp.send(LdapMsg {
|
||||
msgid: 0,
|
||||
@@ -69,7 +69,7 @@ where
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "info", err)]
|
||||
#[instrument(level = "info", err)]
|
||||
pub async fn check_ldap(port: u16) -> Result<()> {
|
||||
check_ldap_endpoint(TcpStream::connect(format!("localhost:{}", port)).await?).await
|
||||
}
|
||||
@@ -126,7 +126,7 @@ fn get_tls_connector(ldaps_options: &LdapsOptions) -> Result<RustlsTlsConnector>
|
||||
Ok(std::sync::Arc::new(client_config).into())
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "info", err)]
|
||||
#[instrument(skip_all, level = "info", err, fields(port = %ldaps_options.port))]
|
||||
pub async fn check_ldaps(ldaps_options: &LdapsOptions) -> Result<()> {
|
||||
if !ldaps_options.enabled {
|
||||
info!("LDAPS not enabled");
|
||||
@@ -150,7 +150,7 @@ pub async fn check_ldaps(ldaps_options: &LdapsOptions) -> Result<()> {
|
||||
.await
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "info", err)]
|
||||
#[instrument(level = "info", err)]
|
||||
pub async fn check_api(port: u16) -> Result<()> {
|
||||
reqwest::get(format!("http://localhost:{}/health", port))
|
||||
.await?
|
||||
|
||||
@@ -1,12 +1,12 @@
|
||||
use sea_orm::{
|
||||
sea_query::{self, ColumnDef, ForeignKey, ForeignKeyAction, Iden, Table},
|
||||
ConnectionTrait,
|
||||
sea_query::{ColumnDef, ForeignKey, ForeignKeyAction, Table},
|
||||
ConnectionTrait, DeriveIden,
|
||||
};
|
||||
|
||||
pub use crate::domain::{sql_migrations::Users, sql_tables::DbConnection};
|
||||
|
||||
/// Contains the refresh tokens for a given user.
|
||||
#[derive(Iden)]
|
||||
#[derive(DeriveIden)]
|
||||
pub enum JwtRefreshStorage {
|
||||
Table,
|
||||
RefreshTokenHash,
|
||||
@@ -15,7 +15,7 @@ pub enum JwtRefreshStorage {
|
||||
}
|
||||
|
||||
/// Contains the blacklisted JWT that haven't expired yet.
|
||||
#[derive(Iden)]
|
||||
#[derive(DeriveIden)]
|
||||
pub enum JwtStorage {
|
||||
Table,
|
||||
JwtHash,
|
||||
@@ -25,7 +25,7 @@ pub enum JwtStorage {
|
||||
}
|
||||
|
||||
/// Contains the temporary tokens to reset the password, sent by email.
|
||||
#[derive(Iden)]
|
||||
#[derive(DeriveIden)]
|
||||
pub enum PasswordResetTokens {
|
||||
Table,
|
||||
Token,
|
||||
|
||||
@@ -1,6 +1,8 @@
|
||||
use crate::{
|
||||
domain::{
|
||||
handler::{BackendHandler, BindRequest, CreateUserRequest, LoginHandler},
|
||||
handler::{
|
||||
BackendHandler, BindRequest, CreateUserRequest, LoginHandler, SchemaBackendHandler,
|
||||
},
|
||||
ldap::{
|
||||
error::{LdapError, LdapResult},
|
||||
group::{convert_groups_to_ldap_op, get_groups_list},
|
||||
@@ -20,9 +22,10 @@ use crate::{
|
||||
use anyhow::Result;
|
||||
use ldap3_proto::proto::{
|
||||
LdapAddRequest, LdapBindCred, LdapBindRequest, LdapBindResponse, LdapCompareRequest,
|
||||
LdapDerefAliases, LdapExtendedRequest, LdapExtendedResponse, LdapFilter, LdapOp,
|
||||
LdapPartialAttribute, LdapPasswordModifyRequest, LdapResult as LdapResultOp, LdapResultCode,
|
||||
LdapSearchRequest, LdapSearchResultEntry, LdapSearchScope,
|
||||
LdapDerefAliases, LdapExtendedRequest, LdapExtendedResponse, LdapFilter, LdapModify,
|
||||
LdapModifyRequest, LdapModifyType, LdapOp, LdapPartialAttribute, LdapPasswordModifyRequest,
|
||||
LdapResult as LdapResultOp, LdapResultCode, LdapSearchRequest, LdapSearchResultEntry,
|
||||
LdapSearchScope,
|
||||
};
|
||||
use std::collections::HashMap;
|
||||
use tracing::{debug, instrument, warn};
|
||||
@@ -37,11 +40,23 @@ enum SearchScope {
|
||||
Groups,
|
||||
User(LdapFilter),
|
||||
Group(LdapFilter),
|
||||
UserOuOnly,
|
||||
GroupOuOnly,
|
||||
Unknown,
|
||||
Invalid,
|
||||
}
|
||||
|
||||
fn get_search_scope(base_dn: &[(String, String)], dn_parts: &[(String, String)]) -> SearchScope {
|
||||
enum InternalSearchResults {
|
||||
UsersAndGroups(Vec<UserAndGroups>, Vec<Group>),
|
||||
Raw(Vec<LdapOp>),
|
||||
Empty,
|
||||
}
|
||||
|
||||
fn get_search_scope(
|
||||
base_dn: &[(String, String)],
|
||||
dn_parts: &[(String, String)],
|
||||
ldap_scope: &LdapSearchScope,
|
||||
) -> SearchScope {
|
||||
let base_dn_len = base_dn.len();
|
||||
if !is_subtree(dn_parts, base_dn) {
|
||||
SearchScope::Invalid
|
||||
@@ -50,11 +65,19 @@ fn get_search_scope(base_dn: &[(String, String)], dn_parts: &[(String, String)])
|
||||
} else if dn_parts.len() == base_dn_len + 1
|
||||
&& dn_parts[0] == ("ou".to_string(), "people".to_string())
|
||||
{
|
||||
SearchScope::Users
|
||||
if matches!(ldap_scope, LdapSearchScope::Base) {
|
||||
SearchScope::UserOuOnly
|
||||
} else {
|
||||
SearchScope::Users
|
||||
}
|
||||
} else if dn_parts.len() == base_dn_len + 1
|
||||
&& dn_parts[0] == ("ou".to_string(), "groups".to_string())
|
||||
{
|
||||
SearchScope::Groups
|
||||
if matches!(ldap_scope, LdapSearchScope::Base) {
|
||||
SearchScope::GroupOuOnly
|
||||
} else {
|
||||
SearchScope::Groups
|
||||
}
|
||||
} else if dn_parts.len() == base_dn_len + 2
|
||||
&& dn_parts[1] == ("ou".to_string(), "people".to_string())
|
||||
{
|
||||
@@ -81,7 +104,7 @@ fn make_search_request<S: Into<String>>(
|
||||
) -> LdapSearchRequest {
|
||||
LdapSearchRequest {
|
||||
base: base.to_string(),
|
||||
scope: LdapSearchScope::Base,
|
||||
scope: LdapSearchScope::Subtree,
|
||||
aliases: LdapDerefAliases::Never,
|
||||
sizelimit: 0,
|
||||
timelimit: 0,
|
||||
@@ -126,6 +149,15 @@ fn make_extended_response(code: LdapResultCode, message: String) -> LdapOp {
|
||||
})
|
||||
}
|
||||
|
||||
fn make_modify_response(code: LdapResultCode, message: String) -> LdapOp {
|
||||
LdapOp::ModifyResponse(LdapResultOp {
|
||||
code,
|
||||
matcheddn: "".to_string(),
|
||||
message,
|
||||
referral: vec![],
|
||||
})
|
||||
}
|
||||
|
||||
fn root_dse_response(base_dn: &str) -> LdapOp {
|
||||
LdapOp::SearchResultEntry(LdapSearchResultEntry {
|
||||
dn: "".to_string(),
|
||||
@@ -231,9 +263,8 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug")]
|
||||
#[instrument(skip_all, level = "debug", fields(dn = %request.dn))]
|
||||
pub async fn do_bind(&mut self, request: &LdapBindRequest) -> (LdapResultCode, String) {
|
||||
debug!("DN: {}", &request.dn);
|
||||
let user_id = match get_user_id_from_distinguished_name(
|
||||
&request.dn.to_ascii_lowercase(),
|
||||
&self.ldap_info.base_dn,
|
||||
@@ -268,7 +299,7 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
&self,
|
||||
backend_handler: &B,
|
||||
user: &UserId,
|
||||
password: &str,
|
||||
password: &[u8],
|
||||
) -> Result<()> {
|
||||
use lldap_auth::*;
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
@@ -332,7 +363,7 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
),
|
||||
})
|
||||
} else if let Err(e) = self
|
||||
.change_password(self.get_opaque_handler(), &uid, password)
|
||||
.change_password(self.get_opaque_handler(), &uid, password.as_bytes())
|
||||
.await
|
||||
{
|
||||
Err(LdapError {
|
||||
@@ -372,6 +403,104 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_modify_change(
|
||||
&mut self,
|
||||
user_id: &UserId,
|
||||
credentials: &ValidationResults,
|
||||
user_is_admin: bool,
|
||||
change: &LdapModify,
|
||||
) -> LdapResult<()> {
|
||||
if change.modification.atype.to_ascii_lowercase() != "userpassword"
|
||||
|| change.operation != LdapModifyType::Replace
|
||||
{
|
||||
return Err(LdapError {
|
||||
code: LdapResultCode::UnwillingToPerform,
|
||||
message: format!(
|
||||
r#"Unsupported operation: `{:?}` for `{}`"#,
|
||||
change.operation, change.modification.atype
|
||||
),
|
||||
});
|
||||
}
|
||||
if !credentials.can_change_password(user_id, user_is_admin) {
|
||||
return Err(LdapError {
|
||||
code: LdapResultCode::InsufficentAccessRights,
|
||||
message: format!(
|
||||
r#"User `{}` cannot modify the password of user `{}`"#,
|
||||
&credentials.user, &user_id
|
||||
),
|
||||
});
|
||||
}
|
||||
if let [value] = &change.modification.vals.as_slice() {
|
||||
self.change_password(self.get_opaque_handler(), user_id, value)
|
||||
.await
|
||||
.map_err(|e| LdapError {
|
||||
code: LdapResultCode::Other,
|
||||
message: format!("Error while changing the password: {:#?}", e),
|
||||
})?;
|
||||
} else {
|
||||
return Err(LdapError {
|
||||
code: LdapResultCode::InvalidAttributeSyntax,
|
||||
message: format!(
|
||||
r#"Wrong number of values for password attribute: {}"#,
|
||||
change.modification.vals.len()
|
||||
),
|
||||
});
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn handle_modify_request(
|
||||
&mut self,
|
||||
request: &LdapModifyRequest,
|
||||
) -> LdapResult<Vec<LdapOp>> {
|
||||
let credentials = self
|
||||
.user_info
|
||||
.as_ref()
|
||||
.ok_or_else(|| LdapError {
|
||||
code: LdapResultCode::InsufficentAccessRights,
|
||||
message: "No user currently bound".to_string(),
|
||||
})?
|
||||
.clone();
|
||||
match get_user_id_from_distinguished_name(
|
||||
&request.dn,
|
||||
&self.ldap_info.base_dn,
|
||||
&self.ldap_info.base_dn_str,
|
||||
) {
|
||||
Ok(uid) => {
|
||||
let user_is_admin = self
|
||||
.backend_handler
|
||||
.get_readable_handler(&credentials, &uid)
|
||||
.expect("Unexpected permission error")
|
||||
.get_user_groups(&uid)
|
||||
.await
|
||||
.map_err(|e| LdapError {
|
||||
code: LdapResultCode::OperationsError,
|
||||
message: format!("Internal error while requesting user's groups: {:#?}", e),
|
||||
})?
|
||||
.iter()
|
||||
.any(|g| g.display_name == "lldap_admin");
|
||||
for change in &request.changes {
|
||||
self.handle_modify_change(&uid, &credentials, user_is_admin, change)
|
||||
.await?
|
||||
}
|
||||
Ok(vec![make_modify_response(
|
||||
LdapResultCode::Success,
|
||||
String::new(),
|
||||
)])
|
||||
}
|
||||
Err(e) => Err(LdapError {
|
||||
code: LdapResultCode::InvalidDNSyntax,
|
||||
message: format!("Invalid username: {}", e),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
async fn do_modify_request(&mut self, request: &LdapModifyRequest) -> Vec<LdapOp> {
|
||||
self.handle_modify_request(request)
|
||||
.await
|
||||
.unwrap_or_else(|e: LdapError| vec![make_modify_response(e.code, e.message)])
|
||||
}
|
||||
|
||||
pub async fn do_search_or_dse(
|
||||
&mut self,
|
||||
request: &LdapSearchRequest,
|
||||
@@ -394,9 +523,9 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
&self,
|
||||
backend_handler: &impl UserAndGroupListerBackendHandler,
|
||||
request: &LdapSearchRequest,
|
||||
) -> LdapResult<(Option<Vec<UserAndGroups>>, Option<Vec<Group>>)> {
|
||||
) -> LdapResult<InternalSearchResults> {
|
||||
let dn_parts = parse_distinguished_name(&request.base.to_ascii_lowercase())?;
|
||||
let scope = get_search_scope(&self.ldap_info.base_dn, &dn_parts);
|
||||
let scope = get_search_scope(&self.ldap_info.base_dn, &dn_parts, &request.scope);
|
||||
debug!(?request.base, ?scope);
|
||||
// Disambiguate the lifetimes.
|
||||
fn cast<'a, T, R>(x: T) -> T
|
||||
@@ -424,26 +553,41 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
get_groups_list(&self.ldap_info, filter, &request.base, backend_handler).await
|
||||
});
|
||||
Ok(match scope {
|
||||
SearchScope::Global => (
|
||||
Some(get_user_list(&request.filter).await?),
|
||||
Some(get_group_list(&request.filter).await?),
|
||||
SearchScope::Global => InternalSearchResults::UsersAndGroups(
|
||||
get_user_list(&request.filter).await?,
|
||||
get_group_list(&request.filter).await?,
|
||||
),
|
||||
SearchScope::Users => InternalSearchResults::UsersAndGroups(
|
||||
get_user_list(&request.filter).await?,
|
||||
Vec::new(),
|
||||
),
|
||||
SearchScope::Groups => InternalSearchResults::UsersAndGroups(
|
||||
Vec::new(),
|
||||
get_group_list(&request.filter).await?,
|
||||
),
|
||||
SearchScope::Users => (Some(get_user_list(&request.filter).await?), None),
|
||||
SearchScope::Groups => (None, Some(get_group_list(&request.filter).await?)),
|
||||
SearchScope::User(filter) => {
|
||||
let filter = LdapFilter::And(vec![request.filter.clone(), filter]);
|
||||
(Some(get_user_list(&filter).await?), None)
|
||||
InternalSearchResults::UsersAndGroups(get_user_list(&filter).await?, Vec::new())
|
||||
}
|
||||
SearchScope::Group(filter) => {
|
||||
let filter = LdapFilter::And(vec![request.filter.clone(), filter]);
|
||||
(None, Some(get_group_list(&filter).await?))
|
||||
InternalSearchResults::UsersAndGroups(Vec::new(), get_group_list(&filter).await?)
|
||||
}
|
||||
SearchScope::UserOuOnly | SearchScope::GroupOuOnly => {
|
||||
InternalSearchResults::Raw(vec![LdapOp::SearchResultEntry(LdapSearchResultEntry {
|
||||
dn: request.base.clone(),
|
||||
attributes: vec![LdapPartialAttribute {
|
||||
atype: "objectClass".to_owned(),
|
||||
vals: vec![b"top".to_vec(), b"organizationalUnit".to_vec()],
|
||||
}],
|
||||
})])
|
||||
}
|
||||
SearchScope::Unknown => {
|
||||
warn!(
|
||||
r#"The requested search tree "{}" matches neither the user subtree "ou=people,{}" nor the group subtree "ou=groups,{}""#,
|
||||
&request.base, &self.ldap_info.base_dn_str, &self.ldap_info.base_dn_str
|
||||
);
|
||||
(None, None)
|
||||
InternalSearchResults::Empty
|
||||
}
|
||||
SearchScope::Invalid => {
|
||||
// Search path is not in our tree, just return an empty success.
|
||||
@@ -451,7 +595,7 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
"The specified search tree {:?} is not under the common subtree {:?}",
|
||||
&dn_parts, &self.ldap_info.base_dn
|
||||
);
|
||||
(None, None)
|
||||
InternalSearchResults::Empty
|
||||
}
|
||||
})
|
||||
}
|
||||
@@ -465,26 +609,27 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
let backend_handler = self
|
||||
.backend_handler
|
||||
.get_user_restricted_lister_handler(user_info);
|
||||
let (users, groups) = self.do_search_internal(&backend_handler, request).await?;
|
||||
let search_results = self.do_search_internal(&backend_handler, request).await?;
|
||||
|
||||
let mut results = Vec::new();
|
||||
if let Some(users) = users {
|
||||
results.extend(convert_users_to_ldap_op(
|
||||
users,
|
||||
&request.attrs,
|
||||
&self.ldap_info,
|
||||
));
|
||||
}
|
||||
if let Some(groups) = groups {
|
||||
results.extend(convert_groups_to_ldap_op(
|
||||
groups,
|
||||
&request.attrs,
|
||||
&self.ldap_info,
|
||||
&backend_handler.user_filter,
|
||||
));
|
||||
}
|
||||
if results.is_empty() || matches!(results[results.len() - 1], LdapOp::SearchResultEntry(_))
|
||||
{
|
||||
let schema = backend_handler.get_schema().await.map_err(|e| LdapError {
|
||||
code: LdapResultCode::OperationsError,
|
||||
message: format!("Unable to get schema: {:#}", e),
|
||||
})?;
|
||||
let mut results = match search_results {
|
||||
InternalSearchResults::UsersAndGroups(users, groups) => {
|
||||
convert_users_to_ldap_op(users, &request.attrs, &self.ldap_info, &schema)
|
||||
.chain(convert_groups_to_ldap_op(
|
||||
groups,
|
||||
&request.attrs,
|
||||
&self.ldap_info,
|
||||
&backend_handler.user_filter,
|
||||
))
|
||||
.collect()
|
||||
}
|
||||
InternalSearchResults::Raw(raw_results) => raw_results,
|
||||
InternalSearchResults::Empty => Vec::new(),
|
||||
};
|
||||
if !matches!(results.last(), Some(LdapOp::SearchResultDone(_))) {
|
||||
results.push(make_search_success());
|
||||
}
|
||||
Ok(results)
|
||||
@@ -524,6 +669,7 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
let attributes: HashMap<String, Vec<u8>> = request
|
||||
.attributes
|
||||
.into_iter()
|
||||
.filter(|a| !a.atype.eq_ignore_ascii_case("objectclass"))
|
||||
.map(parse_attribute)
|
||||
.collect::<LdapResult<_>>()?;
|
||||
fn decode_attribute_value(val: &[u8]) -> LdapResult<String> {
|
||||
@@ -643,6 +789,7 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
// No need to notify on unbind (per rfc4511)
|
||||
return None;
|
||||
}
|
||||
LdapOp::ModifyRequest(request) => self.do_modify_request(&request).await,
|
||||
LdapOp::ExtendedRequest(request) => self.do_extended_request(&request).await,
|
||||
LdapOp::AddRequest(request) => self
|
||||
.do_create_user(request)
|
||||
@@ -664,70 +811,17 @@ impl<Backend: BackendHandler + LoginHandler + OpaqueHandler> LdapHandler<Backend
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::{
|
||||
domain::{error::Result, handler::*, opaque_handler::*, types::*},
|
||||
domain::{handler::*, types::*},
|
||||
infra::test_utils::{setup_default_schema, MockTestBackendHandler},
|
||||
uuid,
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use chrono::TimeZone;
|
||||
use ldap3_proto::proto::{LdapDerefAliases, LdapSearchScope, LdapSubstringFilter};
|
||||
use mockall::predicate::eq;
|
||||
use pretty_assertions::assert_eq;
|
||||
use std::collections::HashSet;
|
||||
use tokio;
|
||||
|
||||
mockall::mock! {
|
||||
pub TestBackendHandler{}
|
||||
impl Clone for TestBackendHandler {
|
||||
fn clone(&self) -> Self;
|
||||
}
|
||||
#[async_trait]
|
||||
impl LoginHandler for TestBackendHandler {
|
||||
async fn bind(&self, request: BindRequest) -> Result<()>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl GroupListerBackendHandler for TestBackendHandler {
|
||||
async fn list_groups(&self, filters: Option<GroupRequestFilter>) -> Result<Vec<Group>>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl GroupBackendHandler for TestBackendHandler {
|
||||
async fn get_group_details(&self, group_id: GroupId) -> Result<GroupDetails>;
|
||||
async fn update_group(&self, request: UpdateGroupRequest) -> Result<()>;
|
||||
async fn create_group(&self, group_name: &str) -> Result<GroupId>;
|
||||
async fn delete_group(&self, group_id: GroupId) -> Result<()>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl UserListerBackendHandler for TestBackendHandler {
|
||||
async fn list_users(&self, filters: Option<UserRequestFilter>, get_groups: bool) -> Result<Vec<UserAndGroups>>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl UserBackendHandler for TestBackendHandler {
|
||||
async fn get_user_details(&self, user_id: &UserId) -> Result<User>;
|
||||
async fn create_user(&self, request: CreateUserRequest) -> Result<()>;
|
||||
async fn update_user(&self, request: UpdateUserRequest) -> Result<()>;
|
||||
async fn delete_user(&self, user_id: &UserId) -> Result<()>;
|
||||
async fn get_user_groups(&self, user_id: &UserId) -> Result<HashSet<GroupDetails>>;
|
||||
async fn add_user_to_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()>;
|
||||
async fn remove_user_from_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl BackendHandler for TestBackendHandler {}
|
||||
#[async_trait]
|
||||
impl OpaqueHandler for TestBackendHandler {
|
||||
async fn login_start(
|
||||
&self,
|
||||
request: login::ClientLoginStartRequest
|
||||
) -> Result<login::ServerLoginStartResponse>;
|
||||
async fn login_finish(&self, request: login::ClientLoginFinishRequest) -> Result<UserId>;
|
||||
async fn registration_start(
|
||||
&self,
|
||||
request: registration::ClientRegistrationStartRequest
|
||||
) -> Result<registration::ServerRegistrationStartResponse>;
|
||||
async fn registration_finish(
|
||||
&self,
|
||||
request: registration::ClientRegistrationFinishRequest
|
||||
) -> Result<()>;
|
||||
}
|
||||
}
|
||||
|
||||
fn make_user_search_request<S: Into<String>>(
|
||||
filter: LdapFilter,
|
||||
attrs: Vec<S>,
|
||||
@@ -762,9 +856,11 @@ mod tests {
|
||||
display_name: group,
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
uuid: uuid!("a1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
|
||||
attributes: Vec::new(),
|
||||
});
|
||||
Ok(set)
|
||||
});
|
||||
setup_default_schema(&mut mock);
|
||||
let mut ldap_handler = LdapHandler::new_for_tests(mock, "dc=Example,dc=com");
|
||||
let request = LdapBindRequest {
|
||||
dn: "uid=test,ou=people,dc=example,dc=coM".to_string(),
|
||||
@@ -847,6 +943,7 @@ mod tests {
|
||||
display_name: "lldap_admin".to_string(),
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
uuid: uuid!("a1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
|
||||
attributes: Vec::new(),
|
||||
});
|
||||
Ok(set)
|
||||
});
|
||||
@@ -933,6 +1030,7 @@ mod tests {
|
||||
display_name: "rockstars".to_string(),
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
uuid: uuid!("a1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
|
||||
attributes: Vec::new(),
|
||||
}]),
|
||||
}])
|
||||
});
|
||||
@@ -1079,9 +1177,17 @@ mod tests {
|
||||
user_id: UserId::new("bob_1"),
|
||||
email: "bob@bobmail.bob".to_string(),
|
||||
display_name: Some("Bôb Böbberson".to_string()),
|
||||
first_name: Some("Bôb".to_string()),
|
||||
last_name: Some("Böbberson".to_string()),
|
||||
uuid: uuid!("698e1d5f-7a40-3151-8745-b9b8a37839da"),
|
||||
attributes: vec![
|
||||
AttributeValue {
|
||||
name: "first_name".to_owned(),
|
||||
value: Serialized::from("Bôb"),
|
||||
},
|
||||
AttributeValue {
|
||||
name: "last_name".to_owned(),
|
||||
value: Serialized::from("Böbberson"),
|
||||
},
|
||||
],
|
||||
..Default::default()
|
||||
},
|
||||
groups: None,
|
||||
@@ -1091,9 +1197,20 @@ mod tests {
|
||||
user_id: UserId::new("jim"),
|
||||
email: "jim@cricket.jim".to_string(),
|
||||
display_name: Some("Jimminy Cricket".to_string()),
|
||||
first_name: Some("Jim".to_string()),
|
||||
last_name: Some("Cricket".to_string()),
|
||||
avatar: Some(JpegPhoto::for_tests()),
|
||||
attributes: vec![
|
||||
AttributeValue {
|
||||
name: "avatar".to_owned(),
|
||||
value: Serialized::from(&JpegPhoto::for_tests()),
|
||||
},
|
||||
AttributeValue {
|
||||
name: "first_name".to_owned(),
|
||||
value: Serialized::from("Jim"),
|
||||
},
|
||||
AttributeValue {
|
||||
name: "last_name".to_owned(),
|
||||
value: Serialized::from("Cricket"),
|
||||
},
|
||||
],
|
||||
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
|
||||
creation_date: Utc
|
||||
.with_ymd_and_hms(2014, 7, 8, 9, 10, 11)
|
||||
@@ -1230,6 +1347,7 @@ mod tests {
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
users: vec![UserId::new("bob"), UserId::new("john")],
|
||||
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
|
||||
attributes: Vec::new(),
|
||||
},
|
||||
Group {
|
||||
id: GroupId(3),
|
||||
@@ -1237,6 +1355,7 @@ mod tests {
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
users: vec![UserId::new("john")],
|
||||
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
|
||||
attributes: Vec::new(),
|
||||
},
|
||||
])
|
||||
});
|
||||
@@ -1307,6 +1426,7 @@ mod tests {
|
||||
GroupRequestFilter::Member(UserId::new("bob")),
|
||||
GroupRequestFilter::DisplayName("rockstars".to_string()),
|
||||
false.into(),
|
||||
GroupRequestFilter::Uuid(uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc")),
|
||||
true.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
@@ -1327,6 +1447,7 @@ mod tests {
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
users: vec![],
|
||||
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
|
||||
attributes: Vec::new(),
|
||||
}])
|
||||
});
|
||||
let mut ldap_handler = setup_bound_admin_handler(mock).await;
|
||||
@@ -1345,6 +1466,10 @@ mod tests {
|
||||
"dn".to_string(),
|
||||
"uid=rockstars,ou=people,dc=example,dc=com".to_string(),
|
||||
),
|
||||
LdapFilter::Equality(
|
||||
"uuid".to_string(),
|
||||
"04ac75e0-2900-3e21-926c-2f732c26b3fc".to_string(),
|
||||
),
|
||||
LdapFilter::Equality("obJEctclass".to_string(), "groupofUniqueNames".to_string()),
|
||||
LdapFilter::Equality("objectclass".to_string(), "groupOfNames".to_string()),
|
||||
LdapFilter::Present("objectclass".to_string()),
|
||||
@@ -1393,6 +1518,7 @@ mod tests {
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
users: vec![],
|
||||
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
|
||||
attributes: Vec::new(),
|
||||
}])
|
||||
});
|
||||
let mut ldap_handler = setup_bound_admin_handler(mock).await;
|
||||
@@ -1530,6 +1656,10 @@ mod tests {
|
||||
true.into(),
|
||||
true.into(),
|
||||
false.into(),
|
||||
UserRequestFilter::AttributeEquality(
|
||||
"first_name".to_owned(),
|
||||
"firstname".to_owned(),
|
||||
),
|
||||
false.into(),
|
||||
UserRequestFilter::UserIdSubString(SubStringFilter {
|
||||
initial: Some("iNIt".to_owned()),
|
||||
@@ -1537,7 +1667,7 @@ mod tests {
|
||||
final_: Some("finAl".to_owned()),
|
||||
}),
|
||||
UserRequestFilter::SubString(
|
||||
UserColumn::FirstName,
|
||||
UserColumn::DisplayName,
|
||||
SubStringFilter {
|
||||
initial: Some("iNIt".to_owned()),
|
||||
any: vec!["1".to_owned(), "2aA".to_owned()],
|
||||
@@ -1570,6 +1700,7 @@ mod tests {
|
||||
LdapFilter::Present("objectClass".to_string()),
|
||||
LdapFilter::Present("uid".to_string()),
|
||||
LdapFilter::Present("unknown".to_string()),
|
||||
LdapFilter::Equality("givenname".to_string(), "firstname".to_string()),
|
||||
LdapFilter::Equality("unknown_attribute".to_string(), "randomValue".to_string()),
|
||||
LdapFilter::Substring(
|
||||
"uid".to_owned(),
|
||||
@@ -1580,7 +1711,7 @@ mod tests {
|
||||
},
|
||||
),
|
||||
LdapFilter::Substring(
|
||||
"firstName".to_owned(),
|
||||
"displayName".to_owned(),
|
||||
LdapSubstringFilter {
|
||||
initial: Some("iNIt".to_owned()),
|
||||
any: vec!["1".to_owned(), "2aA".to_owned()],
|
||||
@@ -1596,6 +1727,35 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_search_unsupported_substring_filter() {
|
||||
let mut ldap_handler = setup_bound_admin_handler(MockTestBackendHandler::new()).await;
|
||||
let request = make_user_search_request(
|
||||
LdapFilter::Substring(
|
||||
"uuid".to_owned(),
|
||||
LdapSubstringFilter {
|
||||
initial: Some("iNIt".to_owned()),
|
||||
any: vec!["1".to_owned(), "2aA".to_owned()],
|
||||
final_: Some("finAl".to_owned()),
|
||||
},
|
||||
),
|
||||
vec!["objectClass"],
|
||||
);
|
||||
ldap_handler.do_search_or_dse(&request).await.unwrap_err();
|
||||
let request = make_user_search_request(
|
||||
LdapFilter::Substring(
|
||||
"givenname".to_owned(),
|
||||
LdapSubstringFilter {
|
||||
initial: Some("iNIt".to_owned()),
|
||||
any: vec!["1".to_owned(), "2aA".to_owned()],
|
||||
final_: Some("finAl".to_owned()),
|
||||
},
|
||||
),
|
||||
vec!["objectClass"],
|
||||
);
|
||||
ldap_handler.do_search_or_dse(&request).await.unwrap_err();
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_search_member_of_filter() {
|
||||
let mut mock = MockTestBackendHandler::new();
|
||||
@@ -1652,7 +1812,7 @@ mod tests {
|
||||
.with(
|
||||
eq(Some(UserRequestFilter::And(vec![UserRequestFilter::Or(
|
||||
vec![UserRequestFilter::Not(Box::new(
|
||||
UserRequestFilter::Equality(UserColumn::FirstName, "bob".to_string()),
|
||||
UserRequestFilter::Equality(UserColumn::DisplayName, "bob".to_string()),
|
||||
))],
|
||||
)]))),
|
||||
eq(false),
|
||||
@@ -1670,7 +1830,7 @@ mod tests {
|
||||
let mut ldap_handler = setup_bound_admin_handler(mock).await;
|
||||
let request = make_user_search_request(
|
||||
LdapFilter::And(vec![LdapFilter::Or(vec![LdapFilter::Not(Box::new(
|
||||
LdapFilter::Equality("givenname".to_string(), "bob".to_string()),
|
||||
LdapFilter::Equality("displayname".to_string(), "bob".to_string()),
|
||||
))])]),
|
||||
vec!["objectclass"],
|
||||
);
|
||||
@@ -1703,8 +1863,16 @@ mod tests {
|
||||
user_id: UserId::new("bob_1"),
|
||||
email: "bob@bobmail.bob".to_string(),
|
||||
display_name: Some("Bôb Böbberson".to_string()),
|
||||
first_name: Some("Bôb".to_string()),
|
||||
last_name: Some("Böbberson".to_string()),
|
||||
attributes: vec![
|
||||
AttributeValue {
|
||||
name: "first_name".to_owned(),
|
||||
value: Serialized::from("Bôb"),
|
||||
},
|
||||
AttributeValue {
|
||||
name: "last_name".to_owned(),
|
||||
value: Serialized::from("Böbberson"),
|
||||
},
|
||||
],
|
||||
..Default::default()
|
||||
},
|
||||
groups: None,
|
||||
@@ -1720,6 +1888,7 @@ mod tests {
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
users: vec![UserId::new("bob"), UserId::new("john")],
|
||||
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
|
||||
attributes: Vec::new(),
|
||||
}])
|
||||
});
|
||||
let mut ldap_handler = setup_bound_admin_handler(mock).await;
|
||||
@@ -1777,8 +1946,16 @@ mod tests {
|
||||
user_id: UserId::new("bob_1"),
|
||||
email: "bob@bobmail.bob".to_string(),
|
||||
display_name: Some("Bôb Böbberson".to_string()),
|
||||
last_name: Some("Böbberson".to_string()),
|
||||
avatar: Some(JpegPhoto::for_tests()),
|
||||
attributes: vec![
|
||||
AttributeValue {
|
||||
name: "avatar".to_owned(),
|
||||
value: Serialized::from(&JpegPhoto::for_tests()),
|
||||
},
|
||||
AttributeValue {
|
||||
name: "last_name".to_owned(),
|
||||
value: Serialized::from("Böbberson"),
|
||||
},
|
||||
],
|
||||
uuid: uuid!("b4ac75e0-2900-3e21-926c-2f732c26b3fc"),
|
||||
..Default::default()
|
||||
},
|
||||
@@ -1794,6 +1971,7 @@ mod tests {
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
users: vec![UserId::new("bob"), UserId::new("john")],
|
||||
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
|
||||
attributes: Vec::new(),
|
||||
}])
|
||||
});
|
||||
let mut ldap_handler = setup_bound_admin_handler(mock).await;
|
||||
@@ -1979,7 +2157,8 @@ mod tests {
|
||||
use lldap_auth::*;
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let registration_start_request =
|
||||
opaque::client::registration::start_registration("password", &mut rng).unwrap();
|
||||
opaque::client::registration::start_registration("password".as_bytes(), &mut rng)
|
||||
.unwrap();
|
||||
let request = registration::ClientRegistrationStartRequest {
|
||||
username: "bob".to_string(),
|
||||
registration_start_request: registration_start_request.message,
|
||||
@@ -2017,6 +2196,56 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_password_change_modify_request() {
|
||||
let mut mock = MockTestBackendHandler::new();
|
||||
mock.expect_get_user_groups()
|
||||
.with(eq(UserId::new("bob")))
|
||||
.returning(|_| Ok(HashSet::new()));
|
||||
use lldap_auth::*;
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let registration_start_request =
|
||||
opaque::client::registration::start_registration("password".as_bytes(), &mut rng)
|
||||
.unwrap();
|
||||
let request = registration::ClientRegistrationStartRequest {
|
||||
username: "bob".to_string(),
|
||||
registration_start_request: registration_start_request.message,
|
||||
};
|
||||
let start_response = opaque::server::registration::start_registration(
|
||||
&opaque::server::ServerSetup::new(&mut rng),
|
||||
request.registration_start_request,
|
||||
&request.username,
|
||||
)
|
||||
.unwrap();
|
||||
mock.expect_registration_start().times(1).return_once(|_| {
|
||||
Ok(registration::ServerRegistrationStartResponse {
|
||||
server_data: "".to_string(),
|
||||
registration_response: start_response.message,
|
||||
})
|
||||
});
|
||||
mock.expect_registration_finish()
|
||||
.times(1)
|
||||
.return_once(|_| Ok(()));
|
||||
let mut ldap_handler = setup_bound_admin_handler(mock).await;
|
||||
let request = LdapOp::ModifyRequest(LdapModifyRequest {
|
||||
dn: "uid=bob,ou=people,dc=example,dc=com".to_string(),
|
||||
changes: vec![LdapModify {
|
||||
operation: LdapModifyType::Replace,
|
||||
modification: LdapPartialAttribute {
|
||||
atype: "userPassword".to_owned(),
|
||||
vals: vec!["password".as_bytes().to_vec()],
|
||||
},
|
||||
}],
|
||||
});
|
||||
assert_eq!(
|
||||
ldap_handler.handle_ldap_message(request).await,
|
||||
Some(vec![make_modify_response(
|
||||
LdapResultCode::Success,
|
||||
"".to_string(),
|
||||
)])
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_password_change_password_manager() {
|
||||
let mut mock = MockTestBackendHandler::new();
|
||||
@@ -2026,7 +2255,8 @@ mod tests {
|
||||
use lldap_auth::*;
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let registration_start_request =
|
||||
opaque::client::registration::start_registration("password", &mut rng).unwrap();
|
||||
opaque::client::registration::start_registration("password".as_bytes(), &mut rng)
|
||||
.unwrap();
|
||||
let request = registration::ClientRegistrationStartRequest {
|
||||
username: "bob".to_string(),
|
||||
registration_start_request: registration_start_request.message,
|
||||
@@ -2123,6 +2353,7 @@ mod tests {
|
||||
display_name: "lldap_admin".to_string(),
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
uuid: uuid!("a1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
|
||||
attributes: Vec::new(),
|
||||
});
|
||||
mock.expect_get_user_groups()
|
||||
.with(eq(UserId::new("bob")))
|
||||
@@ -2235,6 +2466,42 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_create_user_multiple_object_class() {
|
||||
let mut mock = MockTestBackendHandler::new();
|
||||
mock.expect_create_user()
|
||||
.with(eq(CreateUserRequest {
|
||||
user_id: UserId::new("bob"),
|
||||
email: "".to_owned(),
|
||||
display_name: Some("Bob".to_string()),
|
||||
..Default::default()
|
||||
}))
|
||||
.times(1)
|
||||
.return_once(|_| Ok(()));
|
||||
let ldap_handler = setup_bound_admin_handler(mock).await;
|
||||
let request = LdapAddRequest {
|
||||
dn: "uid=bob,ou=people,dc=example,dc=com".to_owned(),
|
||||
attributes: vec![
|
||||
LdapPartialAttribute {
|
||||
atype: "cn".to_owned(),
|
||||
vals: vec![b"Bob".to_vec()],
|
||||
},
|
||||
LdapPartialAttribute {
|
||||
atype: "objectClass".to_owned(),
|
||||
vals: vec![
|
||||
b"top".to_vec(),
|
||||
b"person".to_vec(),
|
||||
b"inetOrgPerson".to_vec(),
|
||||
],
|
||||
},
|
||||
],
|
||||
};
|
||||
assert_eq!(
|
||||
ldap_handler.do_create_user(request).await,
|
||||
Ok(vec![make_add_error(LdapResultCode::Success, String::new())])
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_search_filter_non_attribute() {
|
||||
let mut mock = MockTestBackendHandler::new();
|
||||
@@ -2314,6 +2581,7 @@ mod tests {
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
users: vec![UserId::new("bob")],
|
||||
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
|
||||
attributes: Vec::new(),
|
||||
}])
|
||||
});
|
||||
let mut ldap_handler = setup_bound_admin_handler(mock).await;
|
||||
@@ -2407,6 +2675,7 @@ mod tests {
|
||||
creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
|
||||
users: vec![UserId::new("bob")],
|
||||
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
|
||||
attributes: Vec::new(),
|
||||
}])
|
||||
});
|
||||
let mut ldap_handler = setup_bound_admin_handler(mock).await;
|
||||
@@ -2426,4 +2695,32 @@ mod tests {
|
||||
})])
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_user_ou_search() {
|
||||
let mut ldap_handler = setup_bound_readonly_handler(MockTestBackendHandler::new()).await;
|
||||
let request = LdapSearchRequest {
|
||||
base: "ou=people,dc=example,dc=com".to_owned(),
|
||||
scope: LdapSearchScope::Base,
|
||||
aliases: LdapDerefAliases::Never,
|
||||
sizelimit: 0,
|
||||
timelimit: 0,
|
||||
typesonly: false,
|
||||
filter: LdapFilter::And(vec![]),
|
||||
attrs: Vec::new(),
|
||||
};
|
||||
assert_eq!(
|
||||
ldap_handler.do_search_or_dse(&request).await,
|
||||
Ok(vec![
|
||||
LdapOp::SearchResultEntry(LdapSearchResultEntry {
|
||||
dn: "ou=people,dc=example,dc=com".to_owned(),
|
||||
attributes: vec![LdapPartialAttribute {
|
||||
atype: "objectClass".to_owned(),
|
||||
vals: vec![b"top".to_vec(), b"organizationalUnit".to_vec()]
|
||||
}]
|
||||
}),
|
||||
make_search_success()
|
||||
])
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -73,8 +73,8 @@ where
|
||||
use tokio_stream::StreamExt;
|
||||
let (r, w) = tokio::io::split(stream);
|
||||
// Configure the codec etc.
|
||||
let mut requests = FramedRead::new(r, LdapCodec);
|
||||
let mut resp = FramedWrite::new(w, LdapCodec);
|
||||
let mut requests = FramedRead::new(r, LdapCodec::default());
|
||||
let mut resp = FramedWrite::new(w, LdapCodec::default());
|
||||
|
||||
let mut session = LdapHandler::new(
|
||||
AccessControlledBackendHandler::new(backend_handler),
|
||||
|
||||
@@ -3,8 +3,8 @@ use actix_web::{
|
||||
dev::{ServiceRequest, ServiceResponse},
|
||||
Error,
|
||||
};
|
||||
use tracing::{error, info, Span};
|
||||
use tracing_actix_web::{root_span, RootSpanBuilder};
|
||||
use tracing::{debug, error, Span};
|
||||
use tracing_actix_web::RootSpanBuilder;
|
||||
use tracing_subscriber::{filter::EnvFilter, layer::SubscriberExt, util::SubscriberInitExt};
|
||||
|
||||
/// We will define a custom root span builder to capture additional fields, specific
|
||||
@@ -13,11 +13,11 @@ pub struct CustomRootSpanBuilder;
|
||||
|
||||
impl RootSpanBuilder for CustomRootSpanBuilder {
|
||||
fn on_request_start(request: &ServiceRequest) -> Span {
|
||||
let span = root_span!(request);
|
||||
span.in_scope(|| {
|
||||
info!(uri = %request.uri());
|
||||
});
|
||||
span
|
||||
tracing::debug_span!(
|
||||
"HTTP request",
|
||||
method = request.method().to_string(),
|
||||
uri = request.uri().to_string()
|
||||
)
|
||||
}
|
||||
|
||||
fn on_request_end<B>(_: Span, outcome: &Result<ServiceResponse<B>, Error>) {
|
||||
@@ -26,7 +26,7 @@ impl RootSpanBuilder for CustomRootSpanBuilder {
|
||||
if let Some(error) = response.response().error() {
|
||||
error!(?error);
|
||||
} else {
|
||||
info!(status_code = &response.response().status().as_u16());
|
||||
debug!(status_code = &response.response().status().as_u16());
|
||||
}
|
||||
}
|
||||
Err(error) => error!(?error),
|
||||
|
||||
@@ -1,12 +1,18 @@
|
||||
use crate::infra::{cli::SmtpEncryption, configuration::MailOptions};
|
||||
use anyhow::{Ok, Result};
|
||||
use anyhow::{anyhow, Ok, Result};
|
||||
use lettre::{
|
||||
message::Mailbox, transport::smtp::authentication::Credentials, AsyncSmtpTransport,
|
||||
AsyncTransport, Message, Tokio1Executor,
|
||||
};
|
||||
use tracing::debug;
|
||||
|
||||
async fn send_email(to: Mailbox, subject: &str, body: String, options: &MailOptions) -> Result<()> {
|
||||
async fn send_email(
|
||||
to: Mailbox,
|
||||
subject: &str,
|
||||
body: String,
|
||||
options: &MailOptions,
|
||||
server_url: &url::Url,
|
||||
) -> Result<()> {
|
||||
let from = options
|
||||
.from
|
||||
.clone()
|
||||
@@ -17,6 +23,14 @@ async fn send_email(to: Mailbox, subject: &str, body: String, options: &MailOpti
|
||||
&to, &from, &options.user, &options.server, options.port
|
||||
);
|
||||
let email = Message::builder()
|
||||
.message_id(Some(format!(
|
||||
"<{}@{}>",
|
||||
uuid::Uuid::new_v1(
|
||||
uuid::Timestamp::now(uuid::NoContext),
|
||||
"lldap!".as_bytes().try_into().unwrap()
|
||||
),
|
||||
server_url.domain().unwrap_or_default()
|
||||
)))
|
||||
.from(from)
|
||||
.reply_to(reply_to)
|
||||
.to(to)
|
||||
@@ -43,32 +57,49 @@ async fn send_email(to: Mailbox, subject: &str, body: String, options: &MailOpti
|
||||
mailer = mailer.credentials(creds)
|
||||
}
|
||||
|
||||
mailer.port(options.port).build().send(email).await?;
|
||||
Ok(())
|
||||
if let Err(e) = mailer.port(options.port).build().send(email).await {
|
||||
if e.to_string().contains("CorruptMessage") {
|
||||
Err(anyhow!("CorruptMessage returned by lettre, this usually means the SMTP encryption setting is wrong.").context(e))
|
||||
} else {
|
||||
Err(e.into())
|
||||
}
|
||||
} else {
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn send_password_reset_email(
|
||||
username: &str,
|
||||
to: &str,
|
||||
token: &str,
|
||||
domain: &str,
|
||||
server_url: &url::Url,
|
||||
options: &MailOptions,
|
||||
) -> Result<()> {
|
||||
let to = to.parse()?;
|
||||
let mut reset_url = server_url.clone();
|
||||
reset_url
|
||||
.path_segments_mut()
|
||||
.unwrap()
|
||||
.extend(["reset-password", "step2", token]);
|
||||
let body = format!(
|
||||
"Hello {},
|
||||
This email has been sent to you in order to validate your identity.
|
||||
If you did not initiate the process your credentials might have been
|
||||
compromised. You should reset your password and contact an administrator.
|
||||
|
||||
To reset your password please visit the following URL: {}/reset-password/step2/{}
|
||||
To reset your password please visit the following URL: {}
|
||||
|
||||
Please contact an administrator if you did not initiate the process.",
|
||||
username,
|
||||
domain.trim_end_matches('/'),
|
||||
token
|
||||
username, reset_url
|
||||
);
|
||||
send_email(to, "[LLDAP] Password reset requested", body, options).await
|
||||
send_email(
|
||||
to,
|
||||
"[LLDAP] Password reset requested",
|
||||
body,
|
||||
options,
|
||||
server_url,
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn send_test_email(to: Mailbox, options: &MailOptions) -> Result<()> {
|
||||
@@ -77,6 +108,7 @@ pub async fn send_test_email(to: Mailbox, options: &MailOptions) -> Result<()> {
|
||||
"LLDAP test email",
|
||||
"The test is successful! You can send emails from LLDAP".to_string(),
|
||||
options,
|
||||
&url::Url::parse("http://localhost").unwrap(),
|
||||
)
|
||||
.await
|
||||
}
|
||||
|
||||
@@ -10,6 +10,10 @@ pub mod ldap_handler;
|
||||
pub mod ldap_server;
|
||||
pub mod logging;
|
||||
pub mod mail;
|
||||
pub mod schema;
|
||||
pub mod sql_backend_handler;
|
||||
pub mod tcp_backend_handler;
|
||||
pub mod tcp_server;
|
||||
|
||||
#[cfg(test)]
|
||||
pub mod test_utils;
|
||||
|
||||
104
server/src/infra/schema.rs
Normal file
104
server/src/infra/schema.rs
Normal file
@@ -0,0 +1,104 @@
|
||||
use crate::domain::{
|
||||
handler::{AttributeSchema, Schema},
|
||||
types::AttributeType,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
|
||||
pub struct PublicSchema(Schema);
|
||||
|
||||
impl PublicSchema {
|
||||
pub fn get_schema(&self) -> &Schema {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Schema> for PublicSchema {
|
||||
fn from(mut schema: Schema) -> Self {
|
||||
schema.user_attributes.attributes.extend_from_slice(&[
|
||||
AttributeSchema {
|
||||
name: "user_id".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "creation_date".to_owned(),
|
||||
attribute_type: AttributeType::DateTime,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "mail".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "uuid".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "display_name".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
]);
|
||||
schema
|
||||
.user_attributes
|
||||
.attributes
|
||||
.sort_by(|a, b| a.name.cmp(&b.name));
|
||||
schema.group_attributes.attributes.extend_from_slice(&[
|
||||
AttributeSchema {
|
||||
name: "group_id".to_owned(),
|
||||
attribute_type: AttributeType::Integer,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "creation_date".to_owned(),
|
||||
attribute_type: AttributeType::DateTime,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "uuid".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "display_name".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
]);
|
||||
schema
|
||||
.group_attributes
|
||||
.attributes
|
||||
.sort_by(|a, b| a.name.cmp(&b.name));
|
||||
PublicSchema(schema)
|
||||
}
|
||||
}
|
||||
@@ -53,6 +53,7 @@ pub(crate) fn error_to_http_response(error: TcpError) -> HttpResponse {
|
||||
HttpResponse::Unauthorized()
|
||||
}
|
||||
DomainError::DatabaseError(_)
|
||||
| DomainError::DatabaseTransactionError(_)
|
||||
| DomainError::InternalError(_)
|
||||
| DomainError::UnknownCryptoError(_) => HttpResponse::InternalServerError(),
|
||||
DomainError::Base64DecodeError(_)
|
||||
@@ -86,7 +87,7 @@ fn http_config<Backend>(
|
||||
backend_handler: Backend,
|
||||
jwt_secret: secstr::SecUtf8,
|
||||
jwt_blacklist: HashSet<u64>,
|
||||
server_url: String,
|
||||
server_url: url::Url,
|
||||
mail_options: MailOptions,
|
||||
) where
|
||||
Backend: TcpBackendHandler + BackendHandler + LoginHandler + OpaqueHandler + Clone + 'static,
|
||||
@@ -131,7 +132,7 @@ pub(crate) struct AppState<Backend> {
|
||||
pub backend_handler: AccessControlledBackendHandler<Backend>,
|
||||
pub jwt_key: Hmac<Sha512>,
|
||||
pub jwt_blacklist: RwLock<HashSet<u64>>,
|
||||
pub server_url: String,
|
||||
pub server_url: url::Url,
|
||||
pub mail_options: MailOptions,
|
||||
}
|
||||
|
||||
|
||||
100
server/src/infra/test_utils.rs
Normal file
100
server/src/infra/test_utils.rs
Normal file
@@ -0,0 +1,100 @@
|
||||
use crate::domain::{error::Result, handler::*, opaque_handler::*, types::*};
|
||||
|
||||
use async_trait::async_trait;
|
||||
use std::collections::HashSet;
|
||||
|
||||
mockall::mock! {
|
||||
pub TestBackendHandler{}
|
||||
impl Clone for TestBackendHandler {
|
||||
fn clone(&self) -> Self;
|
||||
}
|
||||
#[async_trait]
|
||||
impl LoginHandler for TestBackendHandler {
|
||||
async fn bind(&self, request: BindRequest) -> Result<()>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl GroupListerBackendHandler for TestBackendHandler {
|
||||
async fn list_groups(&self, filters: Option<GroupRequestFilter>) -> Result<Vec<Group>>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl GroupBackendHandler for TestBackendHandler {
|
||||
async fn get_group_details(&self, group_id: GroupId) -> Result<GroupDetails>;
|
||||
async fn update_group(&self, request: UpdateGroupRequest) -> Result<()>;
|
||||
async fn create_group(&self, group_name: &str) -> Result<GroupId>;
|
||||
async fn delete_group(&self, group_id: GroupId) -> Result<()>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl UserListerBackendHandler for TestBackendHandler {
|
||||
async fn list_users(&self, filters: Option<UserRequestFilter>, get_groups: bool) -> Result<Vec<UserAndGroups>>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl UserBackendHandler for TestBackendHandler {
|
||||
async fn get_user_details(&self, user_id: &UserId) -> Result<User>;
|
||||
async fn create_user(&self, request: CreateUserRequest) -> Result<()>;
|
||||
async fn update_user(&self, request: UpdateUserRequest) -> Result<()>;
|
||||
async fn delete_user(&self, user_id: &UserId) -> Result<()>;
|
||||
async fn get_user_groups(&self, user_id: &UserId) -> Result<HashSet<GroupDetails>>;
|
||||
async fn add_user_to_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()>;
|
||||
async fn remove_user_from_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl SchemaBackendHandler for TestBackendHandler {
|
||||
async fn get_schema(&self) -> Result<Schema>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl BackendHandler for TestBackendHandler {}
|
||||
#[async_trait]
|
||||
impl OpaqueHandler for TestBackendHandler {
|
||||
async fn login_start(
|
||||
&self,
|
||||
request: login::ClientLoginStartRequest
|
||||
) -> Result<login::ServerLoginStartResponse>;
|
||||
async fn login_finish(&self, request: login::ClientLoginFinishRequest) -> Result<UserId>;
|
||||
async fn registration_start(
|
||||
&self,
|
||||
request: registration::ClientRegistrationStartRequest
|
||||
) -> Result<registration::ServerRegistrationStartResponse>;
|
||||
async fn registration_finish(
|
||||
&self,
|
||||
request: registration::ClientRegistrationFinishRequest
|
||||
) -> Result<()>;
|
||||
}
|
||||
}
|
||||
|
||||
pub fn setup_default_schema(mock: &mut MockTestBackendHandler) {
|
||||
mock.expect_get_schema().returning(|| {
|
||||
Ok(Schema {
|
||||
user_attributes: AttributeList {
|
||||
attributes: vec![
|
||||
AttributeSchema {
|
||||
name: "avatar".to_owned(),
|
||||
attribute_type: AttributeType::JpegPhoto,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "first_name".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "last_name".to_owned(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
],
|
||||
},
|
||||
group_attributes: AttributeList {
|
||||
attributes: Vec::new(),
|
||||
},
|
||||
})
|
||||
});
|
||||
}
|
||||
@@ -9,7 +9,7 @@ use crate::{
|
||||
domain::{
|
||||
handler::{
|
||||
CreateUserRequest, GroupBackendHandler, GroupListerBackendHandler, GroupRequestFilter,
|
||||
UserBackendHandler,
|
||||
UserBackendHandler, UserListerBackendHandler, UserRequestFilter,
|
||||
},
|
||||
sql_backend_handler::SqlBackendHandler,
|
||||
sql_opaque_handler::register_password,
|
||||
@@ -89,8 +89,19 @@ async fn set_up_server(config: Configuration) -> Result<ServerBuilder> {
|
||||
ensure_group_exists(&backend_handler, "lldap_admin").await?;
|
||||
ensure_group_exists(&backend_handler, "lldap_password_manager").await?;
|
||||
ensure_group_exists(&backend_handler, "lldap_strict_readonly").await?;
|
||||
if let Err(e) = backend_handler.get_user_details(&config.ldap_user_dn).await {
|
||||
warn!("Could not get admin user, trying to create it: {:#}", e);
|
||||
let admin_present = if let Ok(admins) = backend_handler
|
||||
.list_users(
|
||||
Some(UserRequestFilter::MemberOf("lldap_admin".to_owned())),
|
||||
false,
|
||||
)
|
||||
.await
|
||||
{
|
||||
!admins.is_empty()
|
||||
} else {
|
||||
false
|
||||
};
|
||||
if !admin_present {
|
||||
warn!("Could not find an admin user, trying to create the user \"admin\" with the config-provided password");
|
||||
create_admin_user(&backend_handler, &config)
|
||||
.await
|
||||
.map_err(|e| anyhow!("Error setting up admin login/account: {:#}", e))
|
||||
|
||||
@@ -2,6 +2,7 @@
|
||||
use std::env::var;
|
||||
|
||||
pub const DB_KEY: &str = "LLDAP_DATABASE_URL";
|
||||
pub const PRIVATE_KEY_SEED: &str = "LLDAP_KEY_SEED";
|
||||
|
||||
pub fn database_url() -> String {
|
||||
let url = var(DB_KEY).ok();
|
||||
|
||||
@@ -191,7 +191,6 @@ impl Drop for LLDAPFixture {
|
||||
|
||||
for _ in 0..10 {
|
||||
let status = self.child.try_wait();
|
||||
if status.is_err() {}
|
||||
match status {
|
||||
Err(e) => {
|
||||
println!(
|
||||
@@ -236,5 +235,6 @@ fn create_lldap_command() -> Command {
|
||||
let db_url = env::database_url();
|
||||
cmd.current_dir(path);
|
||||
cmd.env(env::DB_KEY, db_url);
|
||||
cmd.env(env::PRIVATE_KEY_SEED, "Random value");
|
||||
cmd
|
||||
}
|
||||
|
||||
@@ -11,7 +11,7 @@ version = "0.1.0"
|
||||
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1"
|
||||
anyhow = "*"
|
||||
rand = "0.8"
|
||||
serde = "1"
|
||||
serde_json = "1"
|
||||
@@ -21,10 +21,10 @@ features = ["std", "color", "suggestions", "derive", "env"]
|
||||
version = "4"
|
||||
|
||||
[dependencies.lldap_auth]
|
||||
version = "0.3"
|
||||
path = "../auth"
|
||||
features = ["opaque_client"]
|
||||
|
||||
[dependencies.reqwest]
|
||||
version = "0.11"
|
||||
version = "*"
|
||||
default-features = false
|
||||
features = ["json", "blocking", "rustls-tls"]
|
||||
|
||||
@@ -1,6 +1,7 @@
|
||||
use anyhow::{bail, ensure, Context, Result};
|
||||
use clap::Parser;
|
||||
use lldap_auth::{opaque, registration};
|
||||
use reqwest::Url;
|
||||
use serde::Serialize;
|
||||
|
||||
/// Set the password for a user in LLDAP.
|
||||
@@ -8,7 +9,7 @@ use serde::Serialize;
|
||||
pub struct CliOpts {
|
||||
/// Base LLDAP url, e.g. "https://lldap/".
|
||||
#[clap(short, long)]
|
||||
pub base_url: String,
|
||||
pub base_url: Url,
|
||||
|
||||
/// Admin username.
|
||||
#[clap(long, default_value = "admin")]
|
||||
@@ -31,10 +32,16 @@ pub struct CliOpts {
|
||||
pub password: String,
|
||||
}
|
||||
|
||||
fn get_token(base_url: &str, username: &str, password: &str) -> Result<String> {
|
||||
fn append_to_url(base_url: &Url, path: &str) -> Url {
|
||||
let mut new_url = base_url.clone();
|
||||
new_url.path_segments_mut().unwrap().extend(path.split('/'));
|
||||
new_url
|
||||
}
|
||||
|
||||
fn get_token(base_url: &Url, username: &str, password: &str) -> Result<String> {
|
||||
let client = reqwest::blocking::Client::new();
|
||||
let response = client
|
||||
.post(format!("{base_url}/auth/simple/login"))
|
||||
.post(append_to_url(base_url, "auth/simple/login"))
|
||||
.header(reqwest::header::CONTENT_TYPE, "application/json")
|
||||
.body(
|
||||
serde_json::to_string(&lldap_auth::login::ClientSimpleLoginRequest {
|
||||
@@ -48,7 +55,7 @@ fn get_token(base_url: &str, username: &str, password: &str) -> Result<String> {
|
||||
Ok(serde_json::from_str::<lldap_auth::login::ServerLoginResponse>(&response.text()?)?.token)
|
||||
}
|
||||
|
||||
fn call_server(url: &str, token: &str, body: impl Serialize) -> Result<String> {
|
||||
fn call_server(url: Url, token: &str, body: impl Serialize) -> Result<String> {
|
||||
let client = reqwest::blocking::Client::new();
|
||||
let request = client
|
||||
.post(url)
|
||||
@@ -60,13 +67,13 @@ fn call_server(url: &str, token: &str, body: impl Serialize) -> Result<String> {
|
||||
}
|
||||
|
||||
pub fn register_start(
|
||||
base_url: &str,
|
||||
base_url: &Url,
|
||||
token: &str,
|
||||
request: registration::ClientRegistrationStartRequest,
|
||||
) -> Result<registration::ServerRegistrationStartResponse> {
|
||||
let request = Some(request);
|
||||
let data = call_server(
|
||||
&format!("{base_url}/auth/opaque/register/start"),
|
||||
append_to_url(base_url, "auth/opaque/register/start"),
|
||||
token,
|
||||
request,
|
||||
)?;
|
||||
@@ -74,13 +81,13 @@ pub fn register_start(
|
||||
}
|
||||
|
||||
pub fn register_finish(
|
||||
base_url: &str,
|
||||
base_url: &Url,
|
||||
token: &str,
|
||||
request: registration::ClientRegistrationFinishRequest,
|
||||
) -> Result<()> {
|
||||
let request = Some(request);
|
||||
call_server(
|
||||
&format!("{base_url}/auth/opaque/register/finish"),
|
||||
append_to_url(base_url, "auth/opaque/register/finish"),
|
||||
token,
|
||||
request,
|
||||
)
|
||||
@@ -94,7 +101,7 @@ fn main() -> Result<()> {
|
||||
"New password is too short, expected at least 8 characters"
|
||||
);
|
||||
ensure!(
|
||||
opts.base_url.starts_with("http://") || opts.base_url.starts_with("https://"),
|
||||
opts.base_url.scheme() == "http" || opts.base_url.scheme() == "https",
|
||||
"Base URL should start with `http://` or `https://`"
|
||||
);
|
||||
let token = match (opts.token.as_ref(), opts.admin_password.as_ref()) {
|
||||
@@ -107,7 +114,7 @@ fn main() -> Result<()> {
|
||||
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let registration_start_request =
|
||||
opaque::client::registration::start_registration(&opts.password, &mut rng)
|
||||
opaque::client::registration::start_registration(opts.password.as_bytes(), &mut rng)
|
||||
.context("Could not initiate password change")?;
|
||||
let start_request = registration::ClientRegistrationStartRequest {
|
||||
username: opts.username.to_string(),
|
||||
|
||||
Reference in New Issue
Block a user