Compare commits
4 Commits
server-use
...
crates
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
1b2dfbe52e | ||
|
|
e1aa2bfb18 | ||
|
|
1377d5aed9 | ||
|
|
fa0185af5e |
@@ -1,9 +1,7 @@
|
||||
FROM rust:1.72
|
||||
FROM rust:1.66
|
||||
|
||||
ARG USERNAME=lldapdev
|
||||
# We need to keep the user as 1001 to match the GitHub runner's UID.
|
||||
# See https://github.com/actions/checkout/issues/956.
|
||||
ARG USER_UID=1001
|
||||
ARG USER_UID=1000
|
||||
ARG USER_GID=$USER_UID
|
||||
|
||||
# Create the user
|
||||
@@ -23,4 +21,4 @@ RUN RUSTFLAGS=-Ctarget-feature=-crt-static cargo install wasm-pack \
|
||||
|
||||
USER $USERNAME
|
||||
ENV CARGO_HOME=/home/$USERNAME/.cargo
|
||||
ENV SHELL=/bin/bash
|
||||
ENV SHELL=/bin/bash
|
||||
2
.gitattributes
vendored
2
.gitattributes
vendored
@@ -1,4 +1,4 @@
|
||||
example_configs/** linguist-documentation
|
||||
example-configs/** linguist-documentation
|
||||
docs/** linguist-documentation
|
||||
*.md linguist-documentation
|
||||
lldap_config.docker_template.toml linguist-documentation
|
||||
|
||||
5
.github/FUNDING.yml
vendored
5
.github/FUNDING.yml
vendored
@@ -1,5 +0,0 @@
|
||||
# These are supported funding model platforms
|
||||
|
||||
github: [lldap]
|
||||
|
||||
custom: ['https://bmc.link/nitnelave']
|
||||
29
.github/ISSUE_TEMPLATE/bug_report.md
vendored
29
.github/ISSUE_TEMPLATE/bug_report.md
vendored
@@ -1,29 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us improve
|
||||
title: "[BUG]"
|
||||
labels: bug
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Describe the bug**
|
||||
A clear and concise description of what the bug is.
|
||||
|
||||
**To Reproduce**
|
||||
Steps to reproduce the behavior:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
|
||||
**Expected behavior**
|
||||
A clear and concise description of what you expected to happen.
|
||||
|
||||
**Logs**
|
||||
If applicable, add logs to explain the problem.
|
||||
LLDAP should be started in verbose mode (`LLDAP_VERBOSE=true` env variable, or `verbose = true` in the config). Include the logs in triple-backtick "```"
|
||||
If integrating with another service, please add its configuration (paste it or screenshot it) as well as any useful logs or screenshots (showing the error, for instance).
|
||||
|
||||
**Additional context**
|
||||
Add any other context about the problem here.
|
||||
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
20
.github/ISSUE_TEMPLATE/feature_request.md
vendored
@@ -1,20 +0,0 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea for this project
|
||||
title: "[FEATURE REQUEST]"
|
||||
labels: enhancement
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Motivation**
|
||||
Why do you want the feature? What problem do you have, what use cases would it enable?
|
||||
|
||||
**Describe the solution you'd like**
|
||||
A clear and concise description of what you want to happen.
|
||||
|
||||
**Describe alternatives you've considered**
|
||||
A clear and concise description of any alternative solutions or features you've considered. You can include workarounds that are currently possible.
|
||||
|
||||
**Additional context**
|
||||
Add any other context or screenshots about the feature request here.
|
||||
25
.github/ISSUE_TEMPLATE/integration-request.md
vendored
25
.github/ISSUE_TEMPLATE/integration-request.md
vendored
@@ -1,25 +0,0 @@
|
||||
---
|
||||
name: Integration request
|
||||
about: Request for integration with a service
|
||||
title: "[INTEGRATION]"
|
||||
labels: integration
|
||||
assignees: ''
|
||||
|
||||
---
|
||||
|
||||
**Checklist**
|
||||
- [ ] Check if there is already an [example config](https://github.com/lldap/lldap/tree/main/example_configs) for it.
|
||||
- [ ] Try to figure out the configuration values for the new service yourself.
|
||||
- You can use other example configs for inspiration.
|
||||
- If you're having trouble, you can ask on [Discord](https://discord.gg/h5PEdRMNyP) or create an issue.
|
||||
- If you succeed, make sure to contribute an example configuration, or a configuration guide.
|
||||
- If you hit a block because of an unimplemented feature, create an issue.
|
||||
|
||||
**Description of the service**
|
||||
Quick summary of what the service is and how it's using LDAP. Link to the service's documentation on configuring LDAP.
|
||||
|
||||
**What you've tried**
|
||||
A sample configuration that you've tried.
|
||||
|
||||
**What's not working**
|
||||
Error logs, error screenshots, features that are not working, missing features.
|
||||
94
.github/workflows/Dockerfile.ci.alpine
vendored
94
.github/workflows/Dockerfile.ci.alpine
vendored
@@ -1,6 +1,72 @@
|
||||
FROM localhost:5000/lldap/lldap:alpine-base
|
||||
# Taken directly from https://github.com/tianon/gosu/blob/master/INSTALL.md
|
||||
ENV GOSU_VERSION 1.17
|
||||
FROM debian:bullseye AS lldap
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG TARGETPLATFORM
|
||||
RUN apt update && apt install -y wget
|
||||
WORKDIR /dim
|
||||
COPY bin/ bin/
|
||||
COPY web/ web/
|
||||
|
||||
RUN mkdir -p target/
|
||||
RUN mkdir -p /lldap/app
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/arm/v7" ]; then \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap-bin/lldap target/lldap && \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
# Web and App dir
|
||||
COPY docker-entrypoint.sh /docker-entrypoint.sh
|
||||
COPY lldap_config.docker_template.toml /lldap/
|
||||
COPY web/index_local.html web/index.html
|
||||
RUN cp target/lldap /lldap/ && \
|
||||
cp target/lldap_migration_tool /lldap/ && \
|
||||
cp target/lldap_set_password /lldap/ && \
|
||||
cp -R web/index.html \
|
||||
web/pkg \
|
||||
web/static \
|
||||
/lldap/app/
|
||||
|
||||
WORKDIR /lldap
|
||||
RUN set -x \
|
||||
&& for file in $(cat /lldap/app/static/libraries.txt); do wget -P app/static "$file"; done \
|
||||
&& for file in $(cat /lldap/app/static/fonts/fonts.txt); do wget -P app/static/fonts "$file"; done \
|
||||
&& chmod a+r -R .
|
||||
|
||||
FROM alpine:3.16
|
||||
WORKDIR /app
|
||||
ENV UID=1000
|
||||
ENV GID=1000
|
||||
ENV USER=lldap
|
||||
ENV GOSU_VERSION 1.14
|
||||
# Fetch gosu from git
|
||||
RUN set -eux; \
|
||||
\
|
||||
apk add --no-cache --virtual .gosu-deps \
|
||||
@@ -17,7 +83,7 @@ RUN set -eux; \
|
||||
export GNUPGHOME="$(mktemp -d)"; \
|
||||
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4; \
|
||||
gpg --batch --verify /usr/local/bin/gosu.asc /usr/local/bin/gosu; \
|
||||
gpgconf --kill all; \
|
||||
command -v gpgconf && gpgconf --kill all || :; \
|
||||
rm -rf "$GNUPGHOME" /usr/local/bin/gosu.asc; \
|
||||
\
|
||||
# clean up fetch dependencies
|
||||
@@ -27,4 +93,22 @@ RUN set -eux; \
|
||||
# verify that the binary works
|
||||
gosu --version; \
|
||||
gosu nobody true
|
||||
COPY --chown=$USER:$USER docker-entrypoint.sh /docker-entrypoint.sh
|
||||
RUN apk add --no-cache tini ca-certificates bash tzdata && \
|
||||
addgroup -g $GID $USER && \
|
||||
adduser \
|
||||
--disabled-password \
|
||||
--gecos "" \
|
||||
--home "$(pwd)" \
|
||||
--ingroup "$USER" \
|
||||
--no-create-home \
|
||||
--uid "$UID" \
|
||||
"$USER" && \
|
||||
mkdir -p /data && \
|
||||
chown $USER:$USER /data
|
||||
COPY --from=lldap --chown=$USER:$USER /lldap /app
|
||||
COPY --from=lldap --chown=$USER:$USER /docker-entrypoint.sh /docker-entrypoint.sh
|
||||
VOLUME ["/data"]
|
||||
WORKDIR /app
|
||||
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]
|
||||
CMD ["run", "--config-file", "/data/lldap_config.toml"]
|
||||
HEALTHCHECK CMD ["/app/lldap", "healthcheck", "--config-file", "/data/lldap_config.toml"]
|
||||
|
||||
84
.github/workflows/Dockerfile.ci.alpine-base
vendored
84
.github/workflows/Dockerfile.ci.alpine-base
vendored
@@ -1,84 +0,0 @@
|
||||
FROM debian:bullseye AS lldap
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG TARGETPLATFORM
|
||||
RUN apt update && apt install -y wget
|
||||
WORKDIR /dim
|
||||
COPY bin/ bin/
|
||||
COPY web/ web/
|
||||
|
||||
RUN mkdir -p target/
|
||||
RUN mkdir -p /lldap/app
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/arm/v7" ]; then \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap-bin/lldap target/lldap && \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
# Web and App dir
|
||||
COPY lldap_config.docker_template.toml /lldap/
|
||||
COPY web/index_local.html web/index.html
|
||||
RUN cp target/lldap /lldap/ && \
|
||||
cp target/lldap_migration_tool /lldap/ && \
|
||||
cp target/lldap_set_password /lldap/ && \
|
||||
cp -R web/index.html \
|
||||
web/pkg \
|
||||
web/static \
|
||||
/lldap/app/
|
||||
|
||||
WORKDIR /lldap
|
||||
RUN set -x \
|
||||
&& for file in $(cat /lldap/app/static/libraries.txt); do wget -P app/static "$file"; done \
|
||||
&& for file in $(cat /lldap/app/static/fonts/fonts.txt); do wget -P app/static/fonts "$file"; done \
|
||||
&& chmod a+r -R .
|
||||
|
||||
FROM alpine:3.16
|
||||
WORKDIR /app
|
||||
ENV UID=1000
|
||||
ENV GID=1000
|
||||
ENV USER=lldap
|
||||
RUN apk add --no-cache tini ca-certificates bash tzdata && \
|
||||
addgroup -g $GID $USER && \
|
||||
adduser \
|
||||
--disabled-password \
|
||||
--gecos "" \
|
||||
--home "$(pwd)" \
|
||||
--ingroup "$USER" \
|
||||
--no-create-home \
|
||||
--uid "$UID" \
|
||||
"$USER" && \
|
||||
mkdir -p /data && \
|
||||
chown $USER:$USER /data
|
||||
COPY --from=lldap --chown=$USER:$USER /lldap /app
|
||||
VOLUME ["/data"]
|
||||
HEALTHCHECK CMD ["/app/lldap", "healthcheck", "--config-file", "/data/lldap_config.toml"]
|
||||
WORKDIR /app
|
||||
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]
|
||||
CMD ["run", "--config-file", "/data/lldap_config.toml"]
|
||||
@@ -1,3 +0,0 @@
|
||||
FROM localhost:5000/lldap/lldap:alpine-base
|
||||
COPY --chown=$USER:$USER docker-entrypoint-rootless.sh /docker-entrypoint.sh
|
||||
USER $USER
|
||||
110
.github/workflows/Dockerfile.ci.debian
vendored
110
.github/workflows/Dockerfile.ci.debian
vendored
@@ -1,31 +1,79 @@
|
||||
FROM localhost:5000/lldap/lldap:debian-base
|
||||
# Taken directly from https://github.com/tianon/gosu/blob/master/INSTALL.md
|
||||
ENV GOSU_VERSION 1.17
|
||||
RUN set -eux; \
|
||||
# save list of currently installed packages for later so we can clean up
|
||||
savedAptMark="$(apt-mark showmanual)"; \
|
||||
apt-get update; \
|
||||
apt-get install -y --no-install-recommends ca-certificates gnupg wget; \
|
||||
rm -rf /var/lib/apt/lists/*; \
|
||||
\
|
||||
dpkgArch="$(dpkg --print-architecture | awk -F- '{ print $NF }')"; \
|
||||
wget -O /usr/local/bin/gosu "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$dpkgArch"; \
|
||||
wget -O /usr/local/bin/gosu.asc "https://github.com/tianon/gosu/releases/download/$GOSU_VERSION/gosu-$dpkgArch.asc"; \
|
||||
\
|
||||
# verify the signature
|
||||
export GNUPGHOME="$(mktemp -d)"; \
|
||||
gpg --batch --keyserver hkps://keys.openpgp.org --recv-keys B42F6819007F00F88E364FD4036A9C25BF357DD4; \
|
||||
gpg --batch --verify /usr/local/bin/gosu.asc /usr/local/bin/gosu; \
|
||||
gpgconf --kill all; \
|
||||
rm -rf "$GNUPGHOME" /usr/local/bin/gosu.asc; \
|
||||
\
|
||||
# clean up fetch dependencies
|
||||
apt-mark auto '.*' > /dev/null; \
|
||||
[ -z "$savedAptMark" ] || apt-mark manual $savedAptMark; \
|
||||
apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false; \
|
||||
\
|
||||
chmod +x /usr/local/bin/gosu; \
|
||||
# verify that the binary works
|
||||
gosu --version; \
|
||||
gosu nobody true
|
||||
COPY --chown=$USER:$USER docker-entrypoint.sh /docker-entrypoint.sh
|
||||
FROM debian:bullseye AS lldap
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG TARGETPLATFORM
|
||||
RUN apt update && apt install -y wget
|
||||
WORKDIR /dim
|
||||
COPY bin/ bin/
|
||||
COPY web/ web/
|
||||
|
||||
RUN mkdir -p target/
|
||||
RUN mkdir -p /lldap/app
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/arm/v7" ]; then \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap-bin/lldap target/lldap && \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
# Web and App dir
|
||||
COPY docker-entrypoint.sh /docker-entrypoint.sh
|
||||
COPY lldap_config.docker_template.toml /lldap/
|
||||
COPY web/index_local.html web/index.html
|
||||
RUN cp target/lldap /lldap/ && \
|
||||
cp target/lldap_migration_tool /lldap/ && \
|
||||
cp target/lldap_set_password /lldap/ && \
|
||||
cp -R web/index.html \
|
||||
web/pkg \
|
||||
web/static \
|
||||
/lldap/app/
|
||||
|
||||
WORKDIR /lldap
|
||||
RUN set -x \
|
||||
&& for file in $(cat /lldap/app/static/libraries.txt); do wget -P app/static "$file"; done \
|
||||
&& for file in $(cat /lldap/app/static/fonts/fonts.txt); do wget -P app/static/fonts "$file"; done \
|
||||
&& chmod a+r -R .
|
||||
|
||||
FROM debian:bullseye-slim
|
||||
ENV UID=1000
|
||||
ENV GID=1000
|
||||
ENV USER=lldap
|
||||
RUN apt update && \
|
||||
apt install -y --no-install-recommends tini openssl ca-certificates gosu tzdata && \
|
||||
apt clean && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
groupadd -g $GID $USER && useradd --system -m -g $USER --uid $UID $USER && \
|
||||
mkdir -p /data && chown $USER:$USER /data
|
||||
COPY --from=lldap --chown=$USER:$USER /lldap /app
|
||||
COPY --from=lldap --chown=$USER:$USER /docker-entrypoint.sh /docker-entrypoint.sh
|
||||
VOLUME ["/data"]
|
||||
WORKDIR /app
|
||||
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]
|
||||
CMD ["run", "--config-file", "/data/lldap_config.toml"]
|
||||
HEALTHCHECK CMD ["/app/lldap", "healthcheck", "--config-file", "/data/lldap_config.toml"]
|
||||
|
||||
79
.github/workflows/Dockerfile.ci.debian-base
vendored
79
.github/workflows/Dockerfile.ci.debian-base
vendored
@@ -1,79 +0,0 @@
|
||||
FROM debian:bullseye AS lldap
|
||||
ARG DEBIAN_FRONTEND=noninteractive
|
||||
ARG TARGETPLATFORM
|
||||
RUN apt update && apt install -y wget
|
||||
WORKDIR /dim
|
||||
COPY bin/ bin/
|
||||
COPY web/ web/
|
||||
|
||||
RUN mkdir -p target/
|
||||
RUN mkdir -p /lldap/app
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
RUN if [ "${TARGETPLATFORM}" = "linux/arm/v7" ]; then \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap-bin/lldap target/lldap && \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_migration_tool-bin/lldap_migration_tool target/lldap_migration_tool && \
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_set_password-bin/lldap_set_password target/lldap_set_password && \
|
||||
chmod +x target/lldap && \
|
||||
chmod +x target/lldap_migration_tool && \
|
||||
chmod +x target/lldap_set_password && \
|
||||
ls -la target/ . && \
|
||||
pwd \
|
||||
; fi
|
||||
|
||||
# Web and App dir
|
||||
COPY docker-entrypoint.sh /docker-entrypoint.sh
|
||||
COPY lldap_config.docker_template.toml /lldap/
|
||||
COPY web/index_local.html web/index.html
|
||||
RUN cp target/lldap /lldap/ && \
|
||||
cp target/lldap_migration_tool /lldap/ && \
|
||||
cp target/lldap_set_password /lldap/ && \
|
||||
cp -R web/index.html \
|
||||
web/pkg \
|
||||
web/static \
|
||||
/lldap/app/
|
||||
|
||||
WORKDIR /lldap
|
||||
RUN set -x \
|
||||
&& for file in $(cat /lldap/app/static/libraries.txt); do wget -P app/static "$file"; done \
|
||||
&& for file in $(cat /lldap/app/static/fonts/fonts.txt); do wget -P app/static/fonts "$file"; done \
|
||||
&& chmod a+r -R .
|
||||
|
||||
FROM debian:bullseye-slim
|
||||
ENV UID=1000
|
||||
ENV GID=1000
|
||||
ENV USER=lldap
|
||||
RUN apt update && \
|
||||
apt install -y --no-install-recommends tini openssl ca-certificates tzdata && \
|
||||
apt clean && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
groupadd -g $GID $USER && useradd --system -m -g $USER --uid $UID $USER && \
|
||||
mkdir -p /data && chown $USER:$USER /data
|
||||
COPY --from=lldap --chown=$USER:$USER /lldap /app
|
||||
COPY --from=lldap --chown=$USER:$USER /docker-entrypoint.sh /docker-entrypoint.sh
|
||||
VOLUME ["/data"]
|
||||
WORKDIR /app
|
||||
ENTRYPOINT ["tini", "--", "/docker-entrypoint.sh"]
|
||||
CMD ["run", "--config-file", "/data/lldap_config.toml"]
|
||||
HEALTHCHECK CMD ["/app/lldap", "healthcheck", "--config-file", "/data/lldap_config.toml"]
|
||||
@@ -1,3 +0,0 @@
|
||||
FROM localhost:5000/lldap/lldap:debian-base
|
||||
COPY --chown=$USER:$USER docker-entrypoint-rootless.sh /docker-entrypoint.sh
|
||||
USER $USER
|
||||
49
.github/workflows/Dockerfile.dev
vendored
49
.github/workflows/Dockerfile.dev
vendored
@@ -1,40 +1,45 @@
|
||||
# Keep tracking base image
|
||||
FROM rust:1.74-slim-bookworm
|
||||
FROM rust:1.66-slim-bullseye
|
||||
|
||||
# Set needed env path
|
||||
ENV PATH="/opt/armv7l-linux-musleabihf-cross/:/opt/armv7l-linux-musleabihf-cross/bin/:/opt/aarch64-linux-musl-cross/:/opt/aarch64-linux-musl-cross/bin/:/opt/x86_64-linux-musl-cross/:/opt/x86_64-linux-musl-cross/bin/:$PATH"
|
||||
ENV PATH="/opt/aarch64-linux-musl-cross/:/opt/aarch64-linux-musl-cross/bin/:/opt/x86_64-linux-musl-cross/:/opt/x86_64-linux-musl-cross/bin/:$PATH"
|
||||
|
||||
# Set building env
|
||||
ENV CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse \
|
||||
CARGO_NET_GIT_FETCH_WITH_CLI=true \
|
||||
CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER=armv7l-linux-musleabihf-gcc \
|
||||
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER=aarch64-linux-musl-gcc \
|
||||
CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER=x86_64-linux-musl-gcc \
|
||||
CC_armv7_unknown_linux_musleabihf=armv7l-linux-musleabihf-gcc \
|
||||
CC_x86_64_unknown_linux_musl=x86_64-linux-musl-gcc \
|
||||
CC_aarch64_unknown_linux_musl=aarch64-linux-musl-gcc
|
||||
|
||||
### Install Additional Build Tools
|
||||
### Install build deps x86_64
|
||||
RUN apt update && \
|
||||
apt install -y --no-install-recommends curl git wget make perl pkg-config tar jq gzip && \
|
||||
apt install -y --no-install-recommends curl git wget build-essential make perl pkg-config curl tar jq musl-tools gzip && \
|
||||
curl -fsSL https://deb.nodesource.com/setup_lts.x | bash - && \
|
||||
apt update && \
|
||||
apt install -y --no-install-recommends nodejs && \
|
||||
apt clean && \
|
||||
rm -rf /var/lib/apt/lists/*
|
||||
|
||||
### Add musl-gcc aarch64, x86_64 and armv7l
|
||||
|
||||
### Install build deps aarch64 build
|
||||
RUN dpkg --add-architecture arm64 && \
|
||||
apt update && \
|
||||
apt install -y gcc-aarch64-linux-gnu g++-aarch64-linux-gnu libc6-arm64-cross libc6-dev-arm64-cross gzip && \
|
||||
apt clean && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
rustup target add aarch64-unknown-linux-gnu
|
||||
|
||||
### armhf deps
|
||||
RUN dpkg --add-architecture armhf && \
|
||||
apt update && \
|
||||
apt install -y gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf libc6-armhf-cross libc6-dev-armhf-cross gzip && \
|
||||
apt clean && \
|
||||
rm -rf /var/lib/apt/lists/* && \
|
||||
rustup target add armv7-unknown-linux-gnueabihf
|
||||
|
||||
### Add musl-gcc aarch64 and x86_64
|
||||
RUN wget -c https://musl.cc/x86_64-linux-musl-cross.tgz && \
|
||||
tar zxf ./x86_64-linux-musl-cross.tgz -C /opt && \
|
||||
wget -c https://musl.cc/aarch64-linux-musl-cross.tgz && \
|
||||
tar zxf ./aarch64-linux-musl-cross.tgz -C /opt && \
|
||||
wget -c http://musl.cc/armv7l-linux-musleabihf-cross.tgz && \
|
||||
tar zxf ./armv7l-linux-musleabihf-cross.tgz -C /opt && \
|
||||
rm ./x86_64-linux-musl-cross.tgz && \
|
||||
rm ./aarch64-linux-musl-cross.tgz && \
|
||||
rm ./armv7l-linux-musleabihf-cross.tgz
|
||||
rm ./aarch64-linux-musl-cross.tgz
|
||||
|
||||
### Add musl target
|
||||
RUN rustup target add x86_64-unknown-linux-musl && \
|
||||
rustup target add aarch64-unknown-linux-musl && \
|
||||
rustup target add armv7-unknown-linux-musleabihf
|
||||
rustup target add aarch64-unknown-linux-musl
|
||||
|
||||
|
||||
CMD ["bash"]
|
||||
|
||||
225
.github/workflows/docker-build-static.yml
vendored
225
.github/workflows/docker-build-static.yml
vendored
@@ -30,6 +30,7 @@ env:
|
||||
|
||||
# build-ui , create/compile the web
|
||||
### install wasm
|
||||
### install rollup
|
||||
### run app/build.sh
|
||||
### upload artifacts
|
||||
|
||||
@@ -39,10 +40,10 @@ env:
|
||||
# GitHub actions randomly timeout when downloading musl-gcc, using custom dev image #
|
||||
# Look into .github/workflows/Dockerfile.dev for development image details #
|
||||
# Using lldap dev image based on https://hub.docker.com/_/rust and musl-gcc bundled #
|
||||
# lldap/rust-dev:latest #
|
||||
#######################################################################################
|
||||
# Cargo build
|
||||
### armv7, aarch64 and amd64 is musl based
|
||||
### Cargo build
|
||||
### aarch64 and amd64 is musl based
|
||||
### armv7 is glibc based, musl had issue with time_t when cross compile https://github.com/rust-lang/libc/issues/1848
|
||||
|
||||
# build-ui,builds-armhf, build-aarch64, build-amd64 will upload artifacts will be used next job
|
||||
|
||||
@@ -50,11 +51,12 @@ env:
|
||||
### will run lldap with postgres, mariadb and sqlite backend, do selfcheck command.
|
||||
|
||||
# Build docker image
|
||||
### Triplet docker image arch with debian and alpine base
|
||||
### Triplet docker image arch with debian base
|
||||
### amd64 & aarch64 with alpine base
|
||||
# build-docker-image job will fetch artifacts and run Dockerfile.ci then push the image.
|
||||
### Look into .github/workflows/Dockerfile.ci.debian or .github/workflowds/Dockerfile.ci.alpine
|
||||
|
||||
# Create release artifacts
|
||||
# create release artifacts
|
||||
### Fetch artifacts
|
||||
### Clean up web artifact
|
||||
### Setup folder structure
|
||||
@@ -84,11 +86,11 @@ jobs:
|
||||
needs: pre_job
|
||||
if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.event_name == 'release' }}
|
||||
container:
|
||||
image: lldap/rust-dev:latest
|
||||
image: nitnelave/rust-dev:latest
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4.1.1
|
||||
- uses: actions/cache@v4
|
||||
uses: actions/checkout@v3.5.2
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: |
|
||||
/usr/local/cargo/bin
|
||||
@@ -99,6 +101,8 @@ jobs:
|
||||
key: lldap-ui-${{ hashFiles('**/Cargo.lock') }}
|
||||
restore-keys: |
|
||||
lldap-ui-
|
||||
- name: Install rollup (nodejs)
|
||||
run: npm install -g rollup
|
||||
- name: Add wasm target (rust)
|
||||
run: rustup target add wasm32-unknown-unknown
|
||||
- name: Install wasm-pack with cargo
|
||||
@@ -110,7 +114,7 @@ jobs:
|
||||
- name: Check build path
|
||||
run: ls -al app/
|
||||
- name: Upload ui artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ui
|
||||
path: app/
|
||||
@@ -121,19 +125,21 @@ jobs:
|
||||
needs: pre_job
|
||||
if: ${{ needs.pre_job.outputs.should_skip != 'true' || github.event_name == 'release' }}
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target: [armv7-unknown-linux-musleabihf, aarch64-unknown-linux-musl, x86_64-unknown-linux-musl]
|
||||
target: [armv7-unknown-linux-gnueabihf, aarch64-unknown-linux-musl, x86_64-unknown-linux-musl]
|
||||
container:
|
||||
image: lldap/rust-dev:latest
|
||||
image: nitnelave/rust-dev:latest
|
||||
env:
|
||||
CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
|
||||
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER: aarch64-linux-musl-gcc
|
||||
CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER: x86_64-linux-musl-gcc
|
||||
CARGO_TERM_COLOR: always
|
||||
RUSTFLAGS: -Ctarget-feature=+crt-static
|
||||
CARGO_HOME: ${GITHUB_WORKSPACE}/.cargo
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4.1.1
|
||||
- uses: actions/cache@v4
|
||||
uses: actions/checkout@v3.5.2
|
||||
- uses: actions/cache@v3
|
||||
with:
|
||||
path: |
|
||||
.cargo/bin
|
||||
@@ -149,17 +155,17 @@ jobs:
|
||||
- name: Check path
|
||||
run: ls -al target/release
|
||||
- name: Upload ${{ matrix.target}} lldap artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.target}}-lldap-bin
|
||||
path: target/${{ matrix.target }}/release/lldap
|
||||
- name: Upload ${{ matrix.target }} migration tool artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.target }}-lldap_migration_tool-bin
|
||||
path: target/${{ matrix.target }}/release/lldap_migration_tool
|
||||
- name: Upload ${{ matrix.target }} password tool artifacts
|
||||
uses: actions/upload-artifact@v4
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: ${{ matrix.target }}-lldap_set_password-bin
|
||||
path: target/${{ matrix.target }}/release/lldap_set_password
|
||||
@@ -180,7 +186,7 @@ jobs:
|
||||
MARIADB_ALLOW_EMPTY_ROOT_PASSWORD: 1
|
||||
options: >-
|
||||
--name mariadb
|
||||
--health-cmd="mariadb-admin ping" --health-interval=5s --health-timeout=2s --health-retries=3
|
||||
--health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
|
||||
|
||||
postgresql:
|
||||
image: postgres:latest
|
||||
@@ -199,7 +205,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Download artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: x86_64-unknown-linux-musl-lldap-bin
|
||||
path: bin/
|
||||
@@ -275,7 +281,7 @@ jobs:
|
||||
MARIADB_ALLOW_EMPTY_ROOT_PASSWORD: 1
|
||||
options: >-
|
||||
--name mariadb
|
||||
--health-cmd="mariadb-admin ping" --health-interval=5s --health-timeout=2s --health-retries=3
|
||||
--health-cmd="mysqladmin ping" --health-interval=5s --health-timeout=2s --health-retries=3
|
||||
|
||||
|
||||
mysql:
|
||||
@@ -293,19 +299,14 @@ jobs:
|
||||
|
||||
|
||||
steps:
|
||||
- name: Checkout scripts
|
||||
uses: actions/checkout@v4.1.1
|
||||
with:
|
||||
sparse-checkout: 'scripts'
|
||||
|
||||
- name: Download LLDAP artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: x86_64-unknown-linux-musl-lldap-bin
|
||||
path: bin/
|
||||
|
||||
- name: Download LLDAP set password
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: x86_64-unknown-linux-musl-lldap_set_password-bin
|
||||
path: bin/
|
||||
@@ -346,8 +347,10 @@ jobs:
|
||||
|
||||
- name: Export and Converting to Postgress
|
||||
run: |
|
||||
bash ./scripts/sqlite_dump_commands.sh | sqlite3 ./users.db > ./dump.sql
|
||||
sed -i -r -e "s/X'([[:xdigit:]]+'[^'])/'\\\x\\1/g" -e ":a; s/(INSERT INTO (user_attribute_schema|jwt_storage)\(.*\) VALUES\(.*),1([^']*\);)$/\1,true\3/; s/(INSERT INTO (user_attribute_schema|jwt_storage)\(.*\) VALUES\(.*),0([^']*\);)$/\1,false\3/; ta" -e '1s/^/BEGIN;\n/' -e '$aCOMMIT;' ./dump.sql
|
||||
curl -L https://raw.githubusercontent.com/lldap/lldap/main/scripts/sqlite_dump_commands.sh -o helper.sh
|
||||
chmod +x ./helper.sh
|
||||
./helper.sh | sqlite3 ./users.db > ./dump.sql
|
||||
sed -i -r -e "s/X'([[:xdigit:]]+'[^'])/'\\\x\\1/g" -e '1s/^/BEGIN;\n/' -e '$aCOMMIT;' ./dump.sql
|
||||
|
||||
- name: Create schema on postgres
|
||||
run: |
|
||||
@@ -355,14 +358,16 @@ jobs:
|
||||
|
||||
- name: Copy converted db to postgress and import
|
||||
run: |
|
||||
docker ps -a
|
||||
docker cp ./dump.sql postgresql:/tmp/dump.sql
|
||||
docker exec postgresql bash -c "psql -U lldapuser -d lldap < /tmp/dump.sql" | tee import.log
|
||||
docker exec postgresql bash -c "psql -U lldapuser -d lldap < /tmp/dump.sql"
|
||||
rm ./dump.sql
|
||||
! grep ERROR import.log > /dev/null
|
||||
|
||||
- name: Export and Converting to mariadb
|
||||
run: |
|
||||
bash ./scripts/sqlite_dump_commands.sh | sqlite3 ./users.db > ./dump.sql
|
||||
curl -L https://raw.githubusercontent.com/lldap/lldap/main/scripts/sqlite_dump_commands.sh -o helper.sh
|
||||
chmod +x ./helper.sh
|
||||
./helper.sh | sqlite3 ./users.db > ./dump.sql
|
||||
cp ./dump.sql ./dump-no-sed.sql
|
||||
sed -i -r -e "s/([^']'[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{9})\+00:00'([^'])/\1'\2/g" \-e 's/^INSERT INTO "?([a-zA-Z0-9_]+)"?/INSERT INTO `\1`/' -e '1s/^/START TRANSACTION;\n/' -e '$aCOMMIT;' ./dump.sql
|
||||
sed -i '1 i\SET FOREIGN_KEY_CHECKS = 0;' ./dump.sql
|
||||
@@ -372,14 +377,16 @@ jobs:
|
||||
|
||||
- name: Copy converted db to mariadb and import
|
||||
run: |
|
||||
docker ps -a
|
||||
docker cp ./dump.sql mariadb:/tmp/dump.sql
|
||||
docker exec mariadb bash -c "mariadb -ulldapuser -plldappass -f lldap < /tmp/dump.sql" | tee import.log
|
||||
docker exec mariadb bash -c "mariadb -ulldapuser -plldappass -f lldap < /tmp/dump.sql"
|
||||
rm ./dump.sql
|
||||
! grep ERROR import.log > /dev/null
|
||||
|
||||
- name: Export and Converting to mysql
|
||||
run: |
|
||||
bash ./scripts/sqlite_dump_commands.sh | sqlite3 ./users.db > ./dump.sql
|
||||
curl -L https://raw.githubusercontent.com/lldap/lldap/main/scripts/sqlite_dump_commands.sh -o helper.sh
|
||||
chmod +x ./helper.sh
|
||||
./helper.sh | sqlite3 ./users.db > ./dump.sql
|
||||
sed -i -r -e 's/^INSERT INTO "?([a-zA-Z0-9_]+)"?/INSERT INTO `\1`/' -e '1s/^/START TRANSACTION;\n/' -e '$aCOMMIT;' ./dump.sql
|
||||
sed -i '1 i\SET FOREIGN_KEY_CHECKS = 0;' ./dump.sql
|
||||
|
||||
@@ -388,10 +395,10 @@ jobs:
|
||||
|
||||
- name: Copy converted db to mysql and import
|
||||
run: |
|
||||
docker ps -a
|
||||
docker cp ./dump.sql mysql:/tmp/dump.sql
|
||||
docker exec mysql bash -c "mysql -ulldapuser -plldappass -f lldap < /tmp/dump.sql" | tee import.log
|
||||
docker exec mysql bash -c "mysql -ulldapuser -plldappass -f lldap < /tmp/dump.sql"
|
||||
rm ./dump.sql
|
||||
! grep ERROR import.log > /dev/null
|
||||
|
||||
- name: Run lldap with postgres DB and healthcheck again
|
||||
run: |
|
||||
@@ -427,16 +434,12 @@ jobs:
|
||||
LLDAP_http_port: 17173
|
||||
LLDAP_JWT_SECRET: somejwtsecret
|
||||
|
||||
- name: Test Dummy User Postgres
|
||||
run: ldapsearch -H ldap://localhost:3891 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
- name: Test Dummy User MariaDB
|
||||
run: ldapsearch -H ldap://localhost:3892 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
- name: Test Dummy User MySQL
|
||||
run: ldapsearch -H ldap://localhost:3893 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
- name: Test Dummy User
|
||||
run: |
|
||||
ldapsearch -H ldap://localhost:3891 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
ldapsearch -H ldap://localhost:3892 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
ldapsearch -H ldap://localhost:3893 -LLL -D "uid=dummyuser,ou=people,dc=example,dc=com" -w 'dummypassword' -s "One" -b "ou=people,dc=example,dc=com"
|
||||
|
||||
########################################
|
||||
#### BUILD BASE IMAGE ##################
|
||||
########################################
|
||||
build-docker-image:
|
||||
needs: [build-ui, build-bin]
|
||||
name: Build Docker image
|
||||
@@ -446,7 +449,7 @@ jobs:
|
||||
container: ["debian","alpine"]
|
||||
include:
|
||||
- container: alpine
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||
platforms: linux/amd64,linux/arm64
|
||||
tags: |
|
||||
type=ref,event=pr
|
||||
type=semver,pattern=v{{version}}
|
||||
@@ -459,8 +462,6 @@ jobs:
|
||||
type=raw,value=stable,enable=${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
type=raw,value=stable,enable=${{ startsWith(github.ref, 'refs/tags/v') }},suffix=
|
||||
type=raw,value=latest,enable={{ is_default_branch }},suffix=
|
||||
type=raw,value={{ date 'YYYY-MM-DD' }},enable={{ is_default_branch }}
|
||||
type=raw,value={{ date 'YYYY-MM-DD' }},enable={{ is_default_branch }},suffix=
|
||||
- container: debian
|
||||
platforms: linux/amd64,linux/arm64,linux/arm/v7
|
||||
tags: |
|
||||
@@ -470,102 +471,31 @@ jobs:
|
||||
type=semver,pattern=v{{major}}.{{minor}}
|
||||
type=raw,value=latest,enable={{ is_default_branch }}
|
||||
type=raw,value=stable,enable=${{ startsWith(github.ref, 'refs/tags/v') }}
|
||||
type=raw,value={{ date 'YYYY-MM-DD' }},enable={{ is_default_branch }}
|
||||
services:
|
||||
registry:
|
||||
image: registry:2
|
||||
ports:
|
||||
- 5000:5000
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v4.1.1
|
||||
|
||||
uses: actions/checkout@v3.5.2
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
path: bin
|
||||
|
||||
- name: Download llap ui artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ui
|
||||
path: web
|
||||
|
||||
- name: Setup QEMU
|
||||
uses: docker/setup-qemu-action@v3
|
||||
- name: Setup buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
with:
|
||||
driver-opts: network=host
|
||||
|
||||
- name: Docker ${{ matrix.container }} Base meta
|
||||
id: meta-base
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
# list of Docker images to use as base name for tags
|
||||
images: |
|
||||
localhost:5000/lldap/lldap
|
||||
tags: ${{ matrix.container }}-base
|
||||
|
||||
- name: Build ${{ matrix.container }} Base Docker Image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
# On PR will fail, force fully uncomment push: true, or docker image will fail for next steps
|
||||
#push: ${{ github.event_name != 'pull_request' }}
|
||||
push: true
|
||||
platforms: ${{ matrix.platforms }}
|
||||
file: ./.github/workflows/Dockerfile.ci.${{ matrix.container }}-base
|
||||
tags: |
|
||||
${{ steps.meta-base.outputs.tags }}
|
||||
labels: ${{ steps.meta-base.outputs.labels }}
|
||||
cache-from: type=gha,mode=max
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
#####################################
|
||||
#### build variants docker image ####
|
||||
#####################################
|
||||
|
||||
- name: Docker ${{ matrix.container }}-rootless meta
|
||||
id: meta-rootless
|
||||
uses: docker/metadata-action@v5
|
||||
with:
|
||||
# list of Docker images to use as base name for tags
|
||||
images: |
|
||||
nitnelave/lldap
|
||||
lldap/lldap
|
||||
ghcr.io/lldap/lldap
|
||||
# Wanted Docker tags
|
||||
# vX-alpine
|
||||
# vX.Y-alpine
|
||||
# vX.Y.Z-alpine
|
||||
# latest
|
||||
# latest-alpine
|
||||
# stable
|
||||
# stable-alpine
|
||||
# YYYY-MM-DD
|
||||
# YYYY-MM-DD-alpine
|
||||
#################
|
||||
# vX-debian
|
||||
# vX.Y-debian
|
||||
# vX.Y.Z-debian
|
||||
# latest-debian
|
||||
# stable-debian
|
||||
# YYYY-MM-DD-debian
|
||||
#################
|
||||
# Check matrix for tag list definition
|
||||
flavor: |
|
||||
latest=false
|
||||
suffix=-${{ matrix.container }}-rootless
|
||||
tags: ${{ matrix.tags }}
|
||||
uses: docker/setup-qemu-action@v2
|
||||
- uses: docker/setup-buildx-action@v2
|
||||
|
||||
- name: Docker ${{ matrix.container }} meta
|
||||
id: meta-standard
|
||||
uses: docker/metadata-action@v5
|
||||
id: meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
# list of Docker images to use as base name for tags
|
||||
images: |
|
||||
@@ -580,15 +510,12 @@ jobs:
|
||||
# latest-alpine
|
||||
# stable
|
||||
# stable-alpine
|
||||
# YYYY-MM-DD
|
||||
# YYYY-MM-DD-alpine
|
||||
#################
|
||||
# vX-debian
|
||||
# vX.Y-debian
|
||||
# vX.Y.Z-debian
|
||||
# latest-debian
|
||||
# stable-debian
|
||||
# YYYY-MM-DD-debian
|
||||
#################
|
||||
# Check matrix for tag list definition
|
||||
flavor: |
|
||||
@@ -599,43 +526,33 @@ jobs:
|
||||
# Docker login to nitnelave/lldap and lldap/lldap
|
||||
- name: Login to Nitnelave/LLDAP Docker Hub
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
username: ${{ secrets.DOCKERHUB_USERNAME }}
|
||||
password: ${{ secrets.DOCKERHUB_TOKEN }}
|
||||
|
||||
- name: Login to GitHub Container Registry
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v3
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ghcr.io
|
||||
username: nitnelave
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Build ${{ matrix.container }}-rootless Docker Image
|
||||
uses: docker/build-push-action@v5
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
platforms: ${{ matrix.platforms }}
|
||||
file: ./.github/workflows/Dockerfile.ci.${{ matrix.container }}-rootless
|
||||
tags: |
|
||||
${{ steps.meta-rootless.outputs.tags }}
|
||||
labels: ${{ steps.meta-rootless.outputs.labels }}
|
||||
cache-from: type=gha,mode=max
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
### This docker build always the last, due :latest tag pushed multiple times, for whatever variants may added in future add docker build above this
|
||||
########################################
|
||||
#### docker image build ####
|
||||
########################################
|
||||
- name: Build ${{ matrix.container }} Docker Image
|
||||
uses: docker/build-push-action@v5
|
||||
uses: docker/build-push-action@v4
|
||||
with:
|
||||
context: .
|
||||
push: ${{ github.event_name != 'pull_request' }}
|
||||
platforms: ${{ matrix.platforms }}
|
||||
file: ./.github/workflows/Dockerfile.ci.${{ matrix.container }}
|
||||
tags: |
|
||||
${{ steps.meta-standard.outputs.tags }}
|
||||
labels: ${{ steps.meta-standard.outputs.labels }}
|
||||
${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
cache-from: type=gha,mode=max
|
||||
cache-to: type=gha,mode=max
|
||||
|
||||
@@ -667,7 +584,7 @@ jobs:
|
||||
contents: write
|
||||
steps:
|
||||
- name: Download all artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
path: bin/
|
||||
- name: Check file
|
||||
@@ -676,19 +593,19 @@ jobs:
|
||||
run: |
|
||||
mv bin/aarch64-unknown-linux-musl-lldap-bin/lldap bin/aarch64-lldap
|
||||
mv bin/x86_64-unknown-linux-musl-lldap-bin/lldap bin/amd64-lldap
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap-bin/lldap bin/armhf-lldap
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap-bin/lldap bin/armhf-lldap
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool bin/aarch64-lldap_migration_tool
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_migration_tool-bin/lldap_migration_tool bin/amd64-lldap_migration_tool
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_migration_tool-bin/lldap_migration_tool bin/armhf-lldap_migration_tool
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_migration_tool-bin/lldap_migration_tool bin/armhf-lldap_migration_tool
|
||||
mv bin/aarch64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password bin/aarch64-lldap_set_password
|
||||
mv bin/x86_64-unknown-linux-musl-lldap_set_password-bin/lldap_set_password bin/amd64-lldap_set_password
|
||||
mv bin/armv7-unknown-linux-musleabihf-lldap_set_password-bin/lldap_set_password bin/armhf-lldap_set_password
|
||||
mv bin/armv7-unknown-linux-gnueabihf-lldap_set_password-bin/lldap_set_password bin/armhf-lldap_set_password
|
||||
chmod +x bin/*-lldap
|
||||
chmod +x bin/*-lldap_migration_tool
|
||||
chmod +x bin/*-lldap_set_password
|
||||
|
||||
- name: Download llap ui artifacts
|
||||
uses: actions/download-artifact@v4
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: ui
|
||||
path: web
|
||||
|
||||
20
.github/workflows/release-bot.yml
vendored
20
.github/workflows/release-bot.yml
vendored
@@ -1,20 +0,0 @@
|
||||
name: Release Bot
|
||||
|
||||
on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
jobs:
|
||||
comment:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
issues: write
|
||||
pull-requests: write
|
||||
steps:
|
||||
- uses: nflaig/release-comment-on-pr@master
|
||||
with:
|
||||
token: ${{ secrets.RELEASE_BOT_TOKEN }}
|
||||
message: |
|
||||
Thank you everyone for the contribution!
|
||||
This feature is now available in the latest release, [${releaseTag}](${releaseUrl}).
|
||||
You can support LLDAP by starring our repo, contributing some configuration examples and becoming a sponsor.
|
||||
37
.github/workflows/rust.yml
vendored
37
.github/workflows/rust.yml
vendored
@@ -33,7 +33,7 @@ jobs:
|
||||
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4.1.1
|
||||
uses: actions/checkout@v3.5.2
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
- name: Build
|
||||
run: cargo build --verbose --workspace
|
||||
@@ -52,7 +52,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4.1.1
|
||||
uses: actions/checkout@v3.5.2
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
@@ -69,7 +69,7 @@ jobs:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4.1.1
|
||||
uses: actions/checkout@v3.5.2
|
||||
|
||||
- uses: Swatinem/rust-cache@v2
|
||||
|
||||
@@ -81,14 +81,12 @@ jobs:
|
||||
|
||||
coverage:
|
||||
name: Code coverage
|
||||
needs:
|
||||
- pre_job
|
||||
- test
|
||||
needs: pre_job
|
||||
if: ${{ needs.pre_job.outputs.should_skip != 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main') }}
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout sources
|
||||
uses: actions/checkout@v4.1.1
|
||||
uses: actions/checkout@v3.5.2
|
||||
|
||||
- name: Install Rust
|
||||
run: rustup toolchain install nightly --component llvm-tools-preview && rustup component add llvm-tools-preview --toolchain stable-x86_64-unknown-linux-gnu
|
||||
@@ -114,3 +112,28 @@ jobs:
|
||||
files: lcov.info
|
||||
fail_ci_if_error: true
|
||||
token: ${{ secrets.CODECOV_TOKEN }}
|
||||
|
||||
publish-crates:
|
||||
name: Publish on crates.io
|
||||
if: ${{ needs.pre_job.outputs.should_skip != 'true' || (github.event_name == 'push' && github.ref == 'refs/heads/main') || github.event_name == 'release' }}
|
||||
needs: pre_job
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
target: [lldap_auth, lldap, lldap_app, lldap_set_password, lldap_migration_tool]
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3.5.2
|
||||
- uses: actions-rs/toolchain@v1
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
- name: Publish ${{ matrix.target }} crate
|
||||
uses: katyo/publish-crates@v2
|
||||
with:
|
||||
args: -p ${{ matrix.target }}
|
||||
dry-run: ${{ github.event_name != 'release' }}
|
||||
check-repo: ${{ github.event_name != 'pull_request' }}
|
||||
registry-token: ${{ secrets.CARGO_REGISTRY_TOKEN }}
|
||||
ignore-unpublished-changes: ${{ github.event_name != 'release' }}
|
||||
|
||||
|
||||
63
CHANGELOG.md
63
CHANGELOG.md
@@ -5,69 +5,6 @@ All notable changes to this project will be documented in this file.
|
||||
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
|
||||
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
|
||||
|
||||
## [0.5.0] 2023-09-14
|
||||
|
||||
### Breaking
|
||||
|
||||
- Emails and UUIDs are now enforced to be unique.
|
||||
- If you have several users with the same email, you'll have to disambiguate
|
||||
them. You can do that by either issuing SQL commands directly
|
||||
(`UPDATE users SET email = 'x@x' WHERE user_id = 'bob';`), or by reverting
|
||||
to a 0.4.x version of LLDAP and editing the user through the web UI.
|
||||
An error will prevent LLDAP 0.5+ from starting otherwise.
|
||||
- This was done to prevent account takeover for systems that allow to
|
||||
login via email.
|
||||
|
||||
### Added
|
||||
|
||||
- The server private key can be set as a seed from an env variable (#504).
|
||||
- This is especially useful when you have multiple containers, they don't
|
||||
need to share a writeable folder.
|
||||
- Added support for changing the password through a plain LDAP Modify
|
||||
operation (as opposed to an extended operation), to allow Jellyfin
|
||||
to change password (#620).
|
||||
- Allow creating a user with multiple objectClass (#612).
|
||||
- Emails now have a message ID (#608).
|
||||
- Added a warning for browsers that have WASM/JS disabled (#639).
|
||||
- Added support for querying OUs in LDAP (#669).
|
||||
- Added a button to clear the avatar in the UI (#358).
|
||||
|
||||
|
||||
### Changed
|
||||
|
||||
- Groups are now sorted by name in the web UI (#623).
|
||||
- ARM build now uses musl (#584).
|
||||
- Improved logging.
|
||||
- Default admin user is only created if there are no admins (#563).
|
||||
- That allows you to remove the default admin, making it harder to
|
||||
bruteforce.
|
||||
|
||||
### Fixed
|
||||
|
||||
- Fixed URL parsing with a trailing slash in the password setting utility
|
||||
(#597).
|
||||
|
||||
In addition to all that, there was significant progress towards #67,
|
||||
user-defined attributes. That complex feature will unblock integration with many
|
||||
systems, including PAM authentication.
|
||||
|
||||
### New services
|
||||
|
||||
- Ejabberd
|
||||
- Ergo
|
||||
- LibreNMS
|
||||
- Mealie
|
||||
- MinIO
|
||||
- OpnSense
|
||||
- PfSense
|
||||
- PowerDnsAdmin
|
||||
- Proxmox
|
||||
- Squid
|
||||
- Tandoor recipes
|
||||
- TheLounge
|
||||
- Zabbix-web
|
||||
- Zulip
|
||||
|
||||
## [0.4.3] 2023-04-11
|
||||
|
||||
The repository has changed from `nitnelave/lldap` to `lldap/lldap`, both on GitHub
|
||||
|
||||
@@ -1,97 +0,0 @@
|
||||
# How to contribute to LLDAP
|
||||
|
||||
## Did you find a bug?
|
||||
|
||||
- Make sure there isn't already an [issue](https://github.com/lldap/lldap/issues?q=is%3Aissue+is%3Aopen) for it.
|
||||
- Check if the bug still happens with the `latest` docker image, or the `main` branch if you compile it yourself.
|
||||
- [Create an issue](https://github.com/lldap/lldap/issues/new) on GitHub. What makes a great issue:
|
||||
- A quick summary of the bug.
|
||||
- Steps to reproduce.
|
||||
- LLDAP _verbose_ logs when reproducing the bug. Verbose mode can be set through environment variables (`LLDAP_VERBOSE=true`) or in the config (`verbose = true`).
|
||||
- What you expected to happen.
|
||||
- What actually happened.
|
||||
- Other notes (what you tried, why you think it's happening, ...).
|
||||
|
||||
## Are you requesting integration with a new service?
|
||||
|
||||
- Check if there is already an [example config](https://github.com/lldap/lldap/tree/main/example_configs) for it.
|
||||
- Try to figure out the configuration values for the new service yourself.
|
||||
- You can use other example configs for inspiration.
|
||||
- If you're having trouble, you can ask on [Discord](https://discord.gg/h5PEdRMNyP)
|
||||
- If you succeed, make sure to contribute an example configuration, or a configuration guide.
|
||||
- If you hit a block because of an unimplemented feature, go to the next section.
|
||||
|
||||
## Are you asking for a new feature?
|
||||
|
||||
- Make sure there isn't already an [issue](https://github.com/lldap/lldap/issues?q=is%3Aissue+is%3Aopen) for it.
|
||||
- [Create an issue](https://github.com/lldap/lldap/issues/new) on GitHub. What makes a great feature request:
|
||||
- A quick summary of the feature.
|
||||
- Motivation: what problem does the feature solve?
|
||||
- Workarounds: what are the currently possible solutions to the problem, however bad?
|
||||
|
||||
## Do you want to work on a PR?
|
||||
|
||||
That's great! There are 2 main ways to contribute to the project: documentation and code.
|
||||
|
||||
### Documentation
|
||||
|
||||
The simplest way to contribute is to submit a configuration guide for a new
|
||||
service: it can be an example configuration file, or a markdown guide
|
||||
explaining the steps necessary to configure the service.
|
||||
|
||||
We also have some
|
||||
[documentation](https://github.com/lldap/lldap/tree/main/docs) with more
|
||||
advanced guides (scripting, migrations, ...) you can contribute to.
|
||||
|
||||
### Code
|
||||
|
||||
If you don't know what to start with, check out the
|
||||
[good first issues](https://github.com/lldap/lldap/labels/good%20first%20issue).
|
||||
|
||||
Otherwise, if you want to fix a specific bug or implement a feature, make sure
|
||||
to start by creating an issue for it (if it doesn't already exist). There, we
|
||||
can discuss whether it would be likely to be accepted and consider design
|
||||
issues. That will save you from going down a wrong path, creating an entire PR
|
||||
before getting told that it doesn't align with the project or the design is
|
||||
flawed!
|
||||
|
||||
Once we agree on what to do in the issue, you can start working on the PR. A good quality PR has:
|
||||
- A description of the change.
|
||||
- The format we use for both commit titles and PRs is:
|
||||
`tag: Do the thing`
|
||||
The tag can be: server, app, docker, example_configs, ... It's a broad category.
|
||||
The rest of the title should be an imperative sentence (see for instance [Commit Message
|
||||
Guidelines](https://gist.github.com/robertpainsi/b632364184e70900af4ab688decf6f53)).
|
||||
- The PR should refer to the issue it's addressing (e.g. "Fix #123").
|
||||
- Explain the _why_ of the change.
|
||||
- But also the _how_.
|
||||
- Highlight any potential flaw or limitation.
|
||||
- The code change should be as small as possible while solving the problem.
|
||||
- Don't try to code-golf to change fewer characters, but keep logically separate changes in
|
||||
different PRs.
|
||||
- Add tests if possible.
|
||||
- The tests should highlight the original issue in case of a bug.
|
||||
- Ideally, we can apply the tests without the rest of the change and they would fail. With the
|
||||
change, they pass.
|
||||
- In some areas, there is no test infrastructure in place (e.g. for frontend changes). In that
|
||||
case, do some manual testing and include the results (logs for backend changes, screenshot of a
|
||||
successful service integration, screenshot of the frontend change).
|
||||
- For backend changes, the tests should cover a significant portion of the new code paths, or
|
||||
everything if possible. You can also add more tests to cover existing code.
|
||||
- Of course, make sure all the existing tests pass. This will be checked anyway in the GitHub CI.
|
||||
|
||||
### Workflow
|
||||
|
||||
We use [GitHub Flow](https://docs.github.com/en/get-started/quickstart/github-flow):
|
||||
- Fork the repository.
|
||||
- (Optional) Create a new branch, or just use `main` in your fork.
|
||||
- Make your change.
|
||||
- Create a PR.
|
||||
- Address the comments by adding more commits to your branch (or to `main`).
|
||||
- The PR gets merged (the commits get squashed to a single one).
|
||||
- (Optional) You can delete your branch/fork.
|
||||
|
||||
## Reminder
|
||||
|
||||
We're all volunteers, so be kind to each other! And since we're doing that in our free time, some
|
||||
things can take a longer than expected.
|
||||
966
Cargo.lock
generated
966
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -9,8 +9,6 @@ members = [
|
||||
|
||||
default-members = ["server"]
|
||||
|
||||
resolver = "2"
|
||||
|
||||
[profile.release]
|
||||
lto = true
|
||||
|
||||
|
||||
167
README.md
167
README.md
@@ -5,9 +5,9 @@
|
||||
</p>
|
||||
|
||||
<p align="center">
|
||||
<a href="https://github.com/lldap/lldap/actions/workflows/rust.yml?query=branch%3Amain">
|
||||
<a href="https://github.com/nitnelave/lldap/actions/workflows/rust.yml?query=branch%3Amain">
|
||||
<img
|
||||
src="https://github.com/lldap/lldap/actions/workflows/rust.yml/badge.svg"
|
||||
src="https://github.com/nitnelave/lldap/actions/workflows/rust.yml/badge.svg"
|
||||
alt="Build"/>
|
||||
</a>
|
||||
<a href="https://discord.gg/h5PEdRMNyP">
|
||||
@@ -36,24 +36,15 @@
|
||||
- [About](#about)
|
||||
- [Installation](#installation)
|
||||
- [With Docker](#with-docker)
|
||||
- [With Kubernetes](#with-kubernetes)
|
||||
- [From a package repository](#from-a-package-repository)
|
||||
- [From source](#from-source)
|
||||
- [Backend](#backend)
|
||||
- [Frontend](#frontend)
|
||||
- [Cross-compilation](#cross-compilation)
|
||||
- [Usage](#usage)
|
||||
- [Recommended architecture](#recommended-architecture)
|
||||
- [Client configuration](#client-configuration)
|
||||
- [Compatible services](#compatible-services)
|
||||
- [General configuration guide](#general-configuration-guide)
|
||||
- [Sample client configurations](#sample-client-configurations)
|
||||
- [Incompatible services](#incompatible-services)
|
||||
- [Migrating from SQLite](#migrating-from-sqlite)
|
||||
- [Comparisons with other services](#comparisons-with-other-services)
|
||||
- [vs OpenLDAP](#vs-openldap)
|
||||
- [vs FreeIPA](#vs-freeipa)
|
||||
- [vs Kanidm](#vs-kanidm)
|
||||
- [I can't log in!](#i-cant-log-in)
|
||||
- [Contributions](#contributions)
|
||||
|
||||
@@ -65,7 +56,7 @@ many backends, from KeyCloak to Authelia to Nextcloud and
|
||||
[more](#compatible-services)!
|
||||
|
||||
<img
|
||||
src="https://raw.githubusercontent.com/lldap/lldap/master/screenshot.png"
|
||||
src="https://raw.githubusercontent.com/nitnelave/lldap/master/screenshot.png"
|
||||
alt="Screenshot of the user list page"
|
||||
width="50%"
|
||||
align="right"
|
||||
@@ -98,10 +89,9 @@ MySQL/MariaDB or PostgreSQL.
|
||||
|
||||
### With Docker
|
||||
|
||||
The image is available at `lldap/lldap`. You should persist the `/data`
|
||||
folder, which contains your configuration and the SQLite database (you can
|
||||
remove this step if you use a different DB and configure with environment
|
||||
variables only).
|
||||
The image is available at `nitnelave/lldap`. You should persist the `/data`
|
||||
folder, which contains your configuration, the database and the private key
|
||||
file.
|
||||
|
||||
Configure the server by copying the `lldap_config.docker_template.toml` to
|
||||
`/data/lldap_config.toml` and updating the configuration values (especially the
|
||||
@@ -109,12 +99,10 @@ Configure the server by copying the `lldap_config.docker_template.toml` to
|
||||
Environment variables should be prefixed with `LLDAP_` to override the
|
||||
configuration.
|
||||
|
||||
If the `lldap_config.toml` doesn't exist when starting up, LLDAP will use
|
||||
default one. The default admin password is `password`, you can change the
|
||||
password later using the web interface.
|
||||
If the `lldap_config.toml` doesn't exist when starting up, LLDAP will use default one. The default admin password is `password`, you can change the password later using the web interface.
|
||||
|
||||
Secrets can also be set through a file. The filename should be specified by the
|
||||
variables `LLDAP_JWT_SECRET_FILE` or `LLDAP_KEY_SEED_FILE`, and the file
|
||||
variables `LLDAP_JWT_SECRET_FILE` or `LLDAP_LDAP_USER_PASS_FILE`, and the file
|
||||
contents are loaded into the respective configuration parameters. Note that
|
||||
`_FILE` variables take precedence.
|
||||
|
||||
@@ -124,7 +112,6 @@ Example for docker compose:
|
||||
- `:latest` tag image contains recently pushed code or feature tests, in which some instability can be expected.
|
||||
- If `UID` and `GID` no defined LLDAP will use default `UID` and `GID` number `1000`.
|
||||
- If no `TZ` is set, default `UTC` timezone will be used.
|
||||
- You can generate the secrets by running `./generate_secrets.sh`
|
||||
|
||||
```yaml
|
||||
version: "3"
|
||||
@@ -135,12 +122,10 @@ volumes:
|
||||
|
||||
services:
|
||||
lldap:
|
||||
image: lldap/lldap:stable
|
||||
image: nitnelave/lldap:stable
|
||||
ports:
|
||||
# For LDAP, not recommended to expose, see Usage section.
|
||||
#- "3890:3890"
|
||||
# For LDAPS (LDAP Over SSL), enable port if LLDAP_LDAPS_OPTIONS__ENABLED set true, look env below
|
||||
#- "6360:6360"
|
||||
# For LDAP
|
||||
- "3890:3890"
|
||||
# For the web front-end
|
||||
- "17170:17170"
|
||||
volumes:
|
||||
@@ -152,12 +137,8 @@ services:
|
||||
- GID=####
|
||||
- TZ=####/####
|
||||
- LLDAP_JWT_SECRET=REPLACE_WITH_RANDOM
|
||||
- LLDAP_KEY_SEED=REPLACE_WITH_RANDOM
|
||||
- LLDAP_LDAP_USER_PASS=REPLACE_WITH_PASSWORD
|
||||
- LLDAP_LDAP_BASE_DN=dc=example,dc=com
|
||||
# If using LDAPS, set enabled true and configure cert and key path
|
||||
# - LLDAP_LDAPS_OPTIONS__ENABLED=true
|
||||
# - LLDAP_LDAPS_OPTIONS__CERT_FILE=/path/to/certfile.crt
|
||||
# - LLDAP_LDAPS_OPTIONS__KEY_FILE=/path/to/keyfile.key
|
||||
# You can also set a different database:
|
||||
# - LLDAP_DATABASE_URL=mysql://mysql-user:password@mysql-server/my-database
|
||||
# - LLDAP_DATABASE_URL=postgres://postgres-user:password@postgres-server/my-database
|
||||
@@ -170,44 +151,6 @@ front-end.
|
||||
|
||||
See https://github.com/Evantage-WS/lldap-kubernetes for a LLDAP deployment for Kubernetes
|
||||
|
||||
You can bootstrap your lldap instance (users, groups)
|
||||
using [bootstrap.sh](example_configs/bootstrap/bootstrap.md#kubernetes-job).
|
||||
It can be run by Argo CD for managing users in git-opt way, or as a one-shot job.
|
||||
|
||||
### From a package repository
|
||||
|
||||
**Do not open issues in this repository for problems with third-party
|
||||
pre-built packages. Report issues downstream.**
|
||||
|
||||
Depending on the distribution you use, it might be possible to install lldap
|
||||
from a package repository, officially supported by the distribution or
|
||||
community contributed.
|
||||
|
||||
#### Debian, CentOS Fedora, OpenSUSE, Ubuntu
|
||||
|
||||
The package for these distributions can be found at [LLDAP OBS](https://software.opensuse.org//download.html?project=home%3AMasgalor%3ALLDAP&package=lldap).
|
||||
- When using the distributed package, the default login is `admin/password`. You can change that from the web UI after starting the service.
|
||||
|
||||
#### Arch Linux
|
||||
|
||||
Arch Linux offers unofficial support through the [Arch User Repository
|
||||
(AUR)](https://wiki.archlinux.org/title/Arch_User_Repository).
|
||||
Available package descriptions in AUR are:
|
||||
|
||||
- [lldap](https://aur.archlinux.org/packages/lldap) - Builds the latest stable version.
|
||||
- [lldap-bin](https://aur.archlinux.org/packages/lldap-bin) - Uses the latest
|
||||
pre-compiled binaries from the [releases in this repository](https://github.com/lldap/lldap/releases).
|
||||
This package is recommended if you want to run lldap on a system with
|
||||
limited resources.
|
||||
- [lldap-git](https://aur.archlinux.org/packages/lldap-git) - Builds the
|
||||
latest main branch code.
|
||||
|
||||
The package descriptions can be used
|
||||
[to create and install packages](https://wiki.archlinux.org/title/Arch_User_Repository#Getting_started).
|
||||
Each package places lldap's configuration file at `/etc/lldap.toml` and offers
|
||||
[systemd service](https://wiki.archlinux.org/title/systemd#Using_units)
|
||||
`lldap.service` to (auto-)start and stop lldap.
|
||||
|
||||
### From source
|
||||
|
||||
#### Backend
|
||||
@@ -229,13 +172,15 @@ just run `cargo run -- run` to run the server.
|
||||
#### Frontend
|
||||
|
||||
To bring up the server, you'll need to compile the frontend. In addition to
|
||||
`cargo`, you'll need WASM-pack, which can be installed by running `cargo install wasm-pack`.
|
||||
`cargo`, you'll need:
|
||||
|
||||
- WASM-pack: `cargo install wasm-pack`
|
||||
|
||||
Then you can build the frontend files with
|
||||
|
||||
```shell
|
||||
./app/build.sh
|
||||
```
|
||||
````
|
||||
|
||||
(you'll need to run this after every front-end change to update the WASM
|
||||
package served).
|
||||
@@ -270,47 +215,6 @@ You can then get the compiled server binary in
|
||||
Raspberry Pi (or other target), with the folder structure maintained (`app`
|
||||
files in an `app` folder next to the binary).
|
||||
|
||||
## Usage
|
||||
|
||||
The simplest way to use LLDAP is through the web front-end. There you can
|
||||
create users, set passwords, add them to groups and so on. Users can also
|
||||
connect to the web UI and change their information, or request a password reset
|
||||
link (if you configured the SMTP client).
|
||||
|
||||
Creating and managing custom attributes is currently in Beta. It's not
|
||||
supported in the Web UI. The recommended way is to use
|
||||
[Zepmann/lldap-cli](https://github.com/Zepmann/lldap-cli), a
|
||||
community-contributed CLI frontend.
|
||||
|
||||
LLDAP is also very scriptable, through its GraphQL API. See the
|
||||
[Scripting](docs/scripting.md) docs for more info.
|
||||
|
||||
### Recommended architecture
|
||||
|
||||
If you are using containers, a sample architecture could look like this:
|
||||
|
||||
- A reverse proxy (e.g. nginx or Traefik)
|
||||
- An authentication service (e.g. Authelia, Authentik or KeyCloak) connected to
|
||||
LLDAP to provide authentication for non-authenticated services, or to provide
|
||||
SSO with compatible ones.
|
||||
- The LLDAP service, with the web port exposed to Traefik.
|
||||
- The LDAP port doesn't need to be exposed, since only the other containers
|
||||
will access it.
|
||||
- You can also set up LDAPS if you want to expose the LDAP port to the
|
||||
internet (not recommended) or for an extra layer of security in the
|
||||
inter-container communication (though it's very much optional).
|
||||
- The default LLDAP container starts up as root to fix up some files'
|
||||
permissions before downgrading the privilege to the given user. However,
|
||||
you can (should?) use the `*-rootless` version of the images to be able to
|
||||
start directly as that user, once you got the permissions right. Just don't
|
||||
forget to change from the `UID/GID` env vars to the `uid` docker-compose
|
||||
field.
|
||||
- Any other service that needs to connect to LLDAP for authentication (e.g.
|
||||
NextCloud) can be added to a shared network with LLDAP. The finest
|
||||
granularity is a network for each pair of LLDAP-service, but there are often
|
||||
coarser granularities that make sense (e.g. a network for the \*arr stack and
|
||||
LLDAP).
|
||||
|
||||
## Client configuration
|
||||
|
||||
### Compatible services
|
||||
@@ -350,7 +254,6 @@ folder for help with:
|
||||
|
||||
- [Airsonic Advanced](example_configs/airsonic-advanced.md)
|
||||
- [Apache Guacamole](example_configs/apacheguacamole.md)
|
||||
- [Apereo CAS Server](example_configs/apereo_cas_server.md)
|
||||
- [Authelia](example_configs/authelia_config.yml)
|
||||
- [Authentik](example_configs/authentik.md)
|
||||
- [Bookstack](example_configs/bookstack.env.example)
|
||||
@@ -361,65 +264,27 @@ folder for help with:
|
||||
- [Dolibarr](example_configs/dolibarr.md)
|
||||
- [Ejabberd](example_configs/ejabberd.md)
|
||||
- [Emby](example_configs/emby.md)
|
||||
- [Ergo IRCd](example_configs/ergo.md)
|
||||
- [Gitea](example_configs/gitea.md)
|
||||
- [GitLab](example_configs/gitlab.md)
|
||||
- [Grafana](example_configs/grafana_ldap_config.toml)
|
||||
- [Grocy](example_configs/grocy.md)
|
||||
- [Hedgedoc](example_configs/hedgedoc.md)
|
||||
- [Home Assistant](example_configs/home-assistant.md)
|
||||
- [Jellyfin](example_configs/jellyfin.md)
|
||||
- [Jenkins](example_configs/jenkins.md)
|
||||
- [Jitsi Meet](example_configs/jitsi_meet.conf)
|
||||
- [Kasm](example_configs/kasm.md)
|
||||
- [KeyCloak](example_configs/keycloak.md)
|
||||
- [LibreNMS](example_configs/librenms.md)
|
||||
- [Mastodon](example_configs/mastodon.env.example)
|
||||
- [Matrix](example_configs/matrix_synapse.yml)
|
||||
- [Mealie](example_configs/mealie.md)
|
||||
- [MinIO](example_configs/minio.md)
|
||||
- [Nextcloud](example_configs/nextcloud.md)
|
||||
- [Nexus](example_configs/nexus.md)
|
||||
- [Organizr](example_configs/Organizr.md)
|
||||
- [Portainer](example_configs/portainer.md)
|
||||
- [PowerDNS Admin](example_configs/powerdns_admin.md)
|
||||
- [Proxmox VE](example_configs/proxmox.md)
|
||||
- [Rancher](example_configs/rancher.md)
|
||||
- [Seafile](example_configs/seafile.md)
|
||||
- [Shaarli](example_configs/shaarli.md)
|
||||
- [Squid](example_configs/squid.md)
|
||||
- [Syncthing](example_configs/syncthing.md)
|
||||
- [TheLounge](example_configs/thelounge.md)
|
||||
- [Traccar](example_configs/traccar.xml)
|
||||
- [Vaultwarden](example_configs/vaultwarden.md)
|
||||
- [WeKan](example_configs/wekan.md)
|
||||
- [WG Portal](example_configs/wg_portal.env.example)
|
||||
- [WikiJS](example_configs/wikijs.md)
|
||||
- [XBackBone](example_configs/xbackbone_config.php)
|
||||
- [Zendto](example_configs/zendto.md)
|
||||
- [Zitadel](example_configs/zitadel.md)
|
||||
- [Zulip](example_configs/zulip.md)
|
||||
|
||||
### Incompatible services
|
||||
|
||||
Though we try to be maximally compatible, not every feature is supported; LLDAP
|
||||
is not a fully-featured LDAP server, intentionally so.
|
||||
|
||||
LDAP browsing tools are generally not supported, though they could be. If you
|
||||
need to use one but it behaves weirdly, please file a bug.
|
||||
|
||||
Some services use features that are not implemented, or require specific
|
||||
attributes. You can try to create those attributes (see custom attributes in
|
||||
the [Usage](#usage) section).
|
||||
|
||||
Finally, some services require password hashes so they can validate themselves
|
||||
the user's password without contacting LLDAP. This is not and will not be
|
||||
supported, it's incompatible with our password hashing scheme (a zero-knowledge
|
||||
proof). Furthermore, it's generally not recommended in terms of security, since
|
||||
it duplicates the places from which a password hash could leak.
|
||||
|
||||
In that category, the most prominent is Synology. It is, to date, the only
|
||||
service that seems definitely incompatible with LLDAP.
|
||||
|
||||
## Migrating from SQLite
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ homepage = "https://github.com/lldap/lldap"
|
||||
license = "GPL-3.0-only"
|
||||
name = "lldap_app"
|
||||
repository = "https://github.com/lldap/lldap"
|
||||
version = "0.5.1-alpha"
|
||||
version = "0.5.0-alpha"
|
||||
include = ["src/**/*", "queries/**/*", "Cargo.toml", "../schema.graphql"]
|
||||
|
||||
[dependencies]
|
||||
@@ -14,7 +14,7 @@ anyhow = "1"
|
||||
base64 = "0.13"
|
||||
gloo-console = "0.2.3"
|
||||
gloo-file = "0.2.3"
|
||||
gloo-net = "*"
|
||||
gloo-net = "0.2"
|
||||
graphql_client = "0.10"
|
||||
http = "0.2"
|
||||
jwt = "0.13"
|
||||
@@ -23,9 +23,9 @@ serde = "1"
|
||||
serde_json = "1"
|
||||
url-escape = "0.1.1"
|
||||
validator = "=0.14"
|
||||
validator_derive = "*"
|
||||
validator_derive = "0.16"
|
||||
wasm-bindgen = "0.2"
|
||||
wasm-bindgen-futures = "*"
|
||||
wasm-bindgen-futures = "0.4"
|
||||
yew = "0.19.3"
|
||||
yew-router = "0.16"
|
||||
|
||||
@@ -47,13 +47,13 @@ features = [
|
||||
]
|
||||
|
||||
[dependencies.chrono]
|
||||
version = "*"
|
||||
version = "0.4"
|
||||
features = [
|
||||
"wasmbind"
|
||||
]
|
||||
|
||||
[dependencies.lldap_auth]
|
||||
path = "../auth"
|
||||
version = "0.3"
|
||||
features = [ "opaque_client" ]
|
||||
|
||||
[dependencies.image]
|
||||
|
||||
@@ -4,8 +4,7 @@
|
||||
<head>
|
||||
<meta charset="utf-8" />
|
||||
<title>LLDAP Administration</title>
|
||||
<base href="/">
|
||||
<script src="static/main.js" type="module" defer></script>
|
||||
<script src="/static/main.js" type="module" defer></script>
|
||||
<link
|
||||
href="https://cdn.jsdelivr.net/npm/bootstrap-dark-5@1.1.3/dist/css/bootstrap-nightshade.min.css"
|
||||
rel="preload stylesheet"
|
||||
@@ -16,8 +15,8 @@
|
||||
src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.1/dist/js/bootstrap.bundle.min.js"
|
||||
integrity="sha384-/bQdsTh/da6pkI1MST/rWKFNjaCP5gBSY4sEBT38Q/9RBh9AH40zEOg7Hlq2THRZ"
|
||||
crossorigin="anonymous"></script>
|
||||
<script
|
||||
src="https://cdn.jsdelivr.net/npm/bootstrap-dark-5@1.1.3/dist/js/darkmode.min.js"
|
||||
<script
|
||||
src="https://cdn.jsdelivr.net/npm/bootstrap-dark-5@1.1.3/dist/js/darkmode.min.js"
|
||||
integrity="sha384-A4SLs39X/aUfwRclRaXvNeXNBTLZdnZdHhhteqbYFS2jZTRD79tKeFeBn7SGXNpi"
|
||||
crossorigin="anonymous"></script>
|
||||
<link
|
||||
@@ -34,7 +33,7 @@
|
||||
href="https://fonts.googleapis.com/css2?family=Bebas+Neue&display=swap" />
|
||||
<link
|
||||
rel="stylesheet"
|
||||
href="static/style.css" />
|
||||
href="/static/style.css" />
|
||||
<script>
|
||||
function inDarkMode(){
|
||||
return darkmode.inDarkMode;
|
||||
@@ -44,23 +43,6 @@
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<noscript>
|
||||
<!-- This will be displayed if the user doesn't have JavaScript enabled. -->
|
||||
LLDAP requires JavaScript, please switch to a compatible browser or
|
||||
enable it.
|
||||
</noscript>
|
||||
|
||||
<script>
|
||||
/* Detect if the user has WASM support. */
|
||||
if (typeof WebAssembly === 'undefined') {
|
||||
const pWASMMsg = document.createElement("p")
|
||||
pWASMMsg.innerHTML = `
|
||||
LLDAP requires WASM and JIT for JavaScript, please switch to a
|
||||
compatible browser or enable it.
|
||||
`
|
||||
document.body.appendChild(pWASMMsg)
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
@@ -13,8 +13,8 @@
|
||||
<script
|
||||
src="/static/bootstrap.bundle.min.js"
|
||||
integrity="sha384-/bQdsTh/da6pkI1MST/rWKFNjaCP5gBSY4sEBT38Q/9RBh9AH40zEOg7Hlq2THRZ"></script>
|
||||
<script
|
||||
src="/static/darkmode.min.js"
|
||||
<script
|
||||
src="/static/darkmode.min.js"
|
||||
integrity="sha384-A4SLs39X/aUfwRclRaXvNeXNBTLZdnZdHhhteqbYFS2jZTRD79tKeFeBn7SGXNpi"></script>
|
||||
<link
|
||||
rel="stylesheet"
|
||||
@@ -40,23 +40,6 @@
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<noscript>
|
||||
<!-- This will be displayed if the user doesn't have JavaScript enabled. -->
|
||||
LLDAP requires JavaScript, please switch to a compatible browser or
|
||||
enable it.
|
||||
</noscript>
|
||||
|
||||
<script>
|
||||
/* Detect if the user has WASM support. */
|
||||
if (typeof WebAssembly === 'undefined') {
|
||||
const pWASMMsg = document.createElement("p")
|
||||
pWASMMsg.innerHTML = `
|
||||
LLDAP requires WASM and JIT for JavaScript, please switch to a
|
||||
compatible browser or enable it.
|
||||
`
|
||||
document.body.appendChild(pWASMMsg)
|
||||
}
|
||||
</script>
|
||||
</body>
|
||||
|
||||
</html>
|
||||
|
||||
@@ -268,7 +268,7 @@ impl App {
|
||||
<header class="p-2 mb-3 border-bottom">
|
||||
<div class="container">
|
||||
<div class="d-flex flex-wrap align-items-center justify-content-center justify-content-lg-start">
|
||||
<a href={yew_router::utils::base_url().unwrap_or("/".to_string())} class="d-flex align-items-center mt-2 mb-lg-0 me-md-5 text-decoration-none">
|
||||
<a href="/" class="d-flex align-items-center mt-2 mb-lg-0 me-md-5 text-decoration-none">
|
||||
<h2>{"LLDAP"}</h2>
|
||||
</a>
|
||||
|
||||
@@ -355,7 +355,7 @@ impl App {
|
||||
<span>{format!("LLDAP version {}", env!("CARGO_PKG_VERSION"))}</span>
|
||||
</div>
|
||||
<div>
|
||||
<a href="https://github.com/lldap/lldap" class="me-4 text-reset">
|
||||
<a href="https://github.com/nitnelave/lldap" class="me-4 text-reset">
|
||||
<i class="bi-github"></i>
|
||||
</a>
|
||||
<a href="https://discord.gg/h5PEdRMNyP" class="me-4 text-reset">
|
||||
@@ -366,7 +366,7 @@ impl App {
|
||||
</a>
|
||||
</div>
|
||||
<div>
|
||||
<span>{"License "}<a href="https://github.com/lldap/lldap/blob/main/LICENSE" class="link-secondary">{"GNU GPL"}</a></span>
|
||||
<span>{"License "}<a href="https://github.com/nitnelave/lldap/blob/main/LICENSE" class="link-secondary">{"GNU GPL"}</a></span>
|
||||
</div>
|
||||
</footer>
|
||||
}
|
||||
|
||||
@@ -97,7 +97,7 @@ impl CommonComponent<ChangePasswordForm> for ChangePasswordForm {
|
||||
.context("Could not initialize login")?;
|
||||
self.opaque_data = OpaqueData::Login(login_start_request.state);
|
||||
let req = login::ClientLoginStartRequest {
|
||||
username: ctx.props().username.clone().into(),
|
||||
username: ctx.props().username.clone(),
|
||||
login_start_request: login_start_request.message,
|
||||
};
|
||||
self.common.call_backend(
|
||||
@@ -128,13 +128,11 @@ impl CommonComponent<ChangePasswordForm> for ChangePasswordForm {
|
||||
Msg::SubmitNewPassword => {
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let new_password = self.form.model().password;
|
||||
let registration_start_request = opaque::client::registration::start_registration(
|
||||
new_password.as_bytes(),
|
||||
&mut rng,
|
||||
)
|
||||
.context("Could not initiate password change")?;
|
||||
let registration_start_request =
|
||||
opaque::client::registration::start_registration(&new_password, &mut rng)
|
||||
.context("Could not initiate password change")?;
|
||||
let req = registration::ClientRegistrationStartRequest {
|
||||
username: ctx.props().username.clone().into(),
|
||||
username: ctx.props().username.clone(),
|
||||
registration_start_request: registration_start_request.message,
|
||||
};
|
||||
self.opaque_data = OpaqueData::Registration(registration_start_request.state);
|
||||
|
||||
@@ -90,7 +90,6 @@ impl CommonComponent<CreateUserForm> for CreateUserForm {
|
||||
firstName: to_option(model.first_name),
|
||||
lastName: to_option(model.last_name),
|
||||
avatar: None,
|
||||
attributes: None,
|
||||
},
|
||||
};
|
||||
self.common.call_graphql::<CreateUser, _>(
|
||||
@@ -118,12 +117,9 @@ impl CommonComponent<CreateUserForm> for CreateUserForm {
|
||||
let opaque::client::registration::ClientRegistrationStartResult {
|
||||
state,
|
||||
message,
|
||||
} = opaque::client::registration::start_registration(
|
||||
password.as_bytes(),
|
||||
&mut rng,
|
||||
)?;
|
||||
} = opaque::client::registration::start_registration(&password, &mut rng)?;
|
||||
let req = registration::ClientRegistrationStartRequest {
|
||||
username: user_id.into(),
|
||||
username: user_id,
|
||||
registration_start_request: message,
|
||||
};
|
||||
self.common
|
||||
@@ -238,7 +234,7 @@ impl Component for CreateUserForm {
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row mb-3">
|
||||
<label for="display_name"
|
||||
<label for="display-name"
|
||||
class="form-label col-4 col-form-label">
|
||||
{"Display name:"}
|
||||
</label>
|
||||
@@ -257,7 +253,7 @@ impl Component for CreateUserForm {
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row mb-3">
|
||||
<label for="first_name"
|
||||
<label for="first-name"
|
||||
class="form-label col-4 col-form-label">
|
||||
{"First name:"}
|
||||
</label>
|
||||
@@ -276,7 +272,7 @@ impl Component for CreateUserForm {
|
||||
</div>
|
||||
</div>
|
||||
<div class="form-group row mb-3">
|
||||
<label for="last_name"
|
||||
<label for="last-name"
|
||||
class="form-label col-4 col-form-label">
|
||||
{"Last name:"}
|
||||
</label>
|
||||
|
||||
@@ -66,7 +66,7 @@ impl CommonComponent<LoginForm> for LoginForm {
|
||||
opaque::client::login::start_login(&password, &mut rng)
|
||||
.context("Could not initialize login")?;
|
||||
let req = login::ClientLoginStartRequest {
|
||||
username: username.into(),
|
||||
username,
|
||||
login_start_request: message,
|
||||
};
|
||||
self.common
|
||||
|
||||
@@ -65,10 +65,10 @@ impl CommonComponent<ResetPasswordStep2Form> for ResetPasswordStep2Form {
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let new_password = self.form.model().password;
|
||||
let registration_start_request =
|
||||
opaque_registration::start_registration(new_password.as_bytes(), &mut rng)
|
||||
opaque_registration::start_registration(&new_password, &mut rng)
|
||||
.context("Could not initiate password change")?;
|
||||
let req = registration::ClientRegistrationStartRequest {
|
||||
username: self.username.as_ref().unwrap().into(),
|
||||
username: self.username.clone().unwrap(),
|
||||
registration_start_request: registration_start_request.message,
|
||||
};
|
||||
self.opaque_data = Some(registration_start_request.state);
|
||||
|
||||
@@ -23,7 +23,10 @@ struct JsFile {
|
||||
|
||||
impl ToString for JsFile {
|
||||
fn to_string(&self) -> String {
|
||||
self.file.as_ref().map(File::name).unwrap_or_default()
|
||||
self.file
|
||||
.as_ref()
|
||||
.map(File::name)
|
||||
.unwrap_or_else(String::new)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -64,8 +67,7 @@ pub struct UpdateUser;
|
||||
pub struct UserDetailsForm {
|
||||
common: CommonComponentParts<Self>,
|
||||
form: yew_form::Form<UserModel>,
|
||||
// None means that the avatar hasn't changed.
|
||||
avatar: Option<JsFile>,
|
||||
avatar: JsFile,
|
||||
reader: Option<FileReader>,
|
||||
/// True if we just successfully updated the user, to display a success message.
|
||||
just_updated: bool,
|
||||
@@ -79,8 +81,6 @@ pub enum Msg {
|
||||
FileSelected(File),
|
||||
/// The "Submit" button was clicked.
|
||||
SubmitClicked,
|
||||
/// The "Clear" button for the avatar was clicked.
|
||||
ClearAvatarClicked,
|
||||
/// A picked file finished loading.
|
||||
FileLoaded(String, Result<Vec<u8>>),
|
||||
/// We got the response from the server about our update message.
|
||||
@@ -102,12 +102,7 @@ impl CommonComponent<UserDetailsForm> for UserDetailsForm {
|
||||
match msg {
|
||||
Msg::Update => Ok(true),
|
||||
Msg::FileSelected(new_avatar) => {
|
||||
if self
|
||||
.avatar
|
||||
.as_ref()
|
||||
.and_then(|f| f.file.as_ref().map(|f| f.name()))
|
||||
!= Some(new_avatar.name())
|
||||
{
|
||||
if self.avatar.file.as_ref().map(|f| f.name()) != Some(new_avatar.name()) {
|
||||
let file_name = new_avatar.name();
|
||||
let link = ctx.link().clone();
|
||||
self.reader = Some(read_as_bytes(&new_avatar, move |res| {
|
||||
@@ -116,32 +111,26 @@ impl CommonComponent<UserDetailsForm> for UserDetailsForm {
|
||||
res.map_err(|e| anyhow::anyhow!("{:#}", e)),
|
||||
))
|
||||
}));
|
||||
self.avatar = Some(JsFile {
|
||||
self.avatar = JsFile {
|
||||
file: Some(new_avatar),
|
||||
contents: None,
|
||||
});
|
||||
};
|
||||
}
|
||||
Ok(true)
|
||||
}
|
||||
Msg::SubmitClicked => self.submit_user_update_form(ctx),
|
||||
Msg::ClearAvatarClicked => {
|
||||
self.avatar = Some(JsFile::default());
|
||||
Ok(true)
|
||||
}
|
||||
Msg::UserUpdated(response) => self.user_update_finished(response),
|
||||
Msg::FileLoaded(file_name, data) => {
|
||||
if let Some(avatar) = &mut self.avatar {
|
||||
if let Some(file) = &avatar.file {
|
||||
if file.name() == file_name {
|
||||
let data = data?;
|
||||
if !is_valid_jpeg(data.as_slice()) {
|
||||
// Clear the selection.
|
||||
self.avatar = None;
|
||||
bail!("Chosen image is not a valid JPEG");
|
||||
} else {
|
||||
avatar.contents = Some(data);
|
||||
return Ok(true);
|
||||
}
|
||||
if let Some(file) = &self.avatar.file {
|
||||
if file.name() == file_name {
|
||||
let data = data?;
|
||||
if !is_valid_jpeg(data.as_slice()) {
|
||||
// Clear the selection.
|
||||
self.avatar = JsFile::default();
|
||||
bail!("Chosen image is not a valid JPEG");
|
||||
} else {
|
||||
self.avatar.contents = Some(data);
|
||||
return Ok(true);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -170,7 +159,7 @@ impl Component for UserDetailsForm {
|
||||
Self {
|
||||
common: CommonComponentParts::<Self>::create(),
|
||||
form: yew_form::Form::new(model),
|
||||
avatar: None,
|
||||
avatar: JsFile::default(),
|
||||
just_updated: false,
|
||||
reader: None,
|
||||
user: ctx.props().user.clone(),
|
||||
@@ -186,13 +175,11 @@ impl Component for UserDetailsForm {
|
||||
type Field = yew_form::Field<UserModel>;
|
||||
let link = &ctx.link();
|
||||
|
||||
let avatar_string = match &self.avatar {
|
||||
Some(avatar) => {
|
||||
let avatar_base64 = to_base64(avatar);
|
||||
avatar_base64.as_deref().unwrap_or("").to_owned()
|
||||
}
|
||||
None => self.user.avatar.as_deref().unwrap_or("").to_owned(),
|
||||
};
|
||||
let avatar_base64 = maybe_to_base64(&self.avatar).unwrap_or_default();
|
||||
let avatar_string = avatar_base64
|
||||
.as_deref()
|
||||
.or(self.user.avatar.as_deref())
|
||||
.unwrap_or("");
|
||||
html! {
|
||||
<div class="py-3">
|
||||
<form class="form">
|
||||
@@ -304,7 +291,7 @@ impl Component for UserDetailsForm {
|
||||
</label>
|
||||
<div class="col-8">
|
||||
<div class="row align-items-center">
|
||||
<div class="col-5">
|
||||
<div class="col-8">
|
||||
<input
|
||||
class="form-control"
|
||||
id="avatarInput"
|
||||
@@ -315,27 +302,12 @@ impl Component for UserDetailsForm {
|
||||
Self::upload_files(input.files())
|
||||
})} />
|
||||
</div>
|
||||
<div class="col-3">
|
||||
<button
|
||||
class="btn btn-secondary col-auto"
|
||||
id="avatarClear"
|
||||
disabled={self.common.is_task_running()}
|
||||
onclick={link.callback(|e: MouseEvent| {e.prevent_default(); Msg::ClearAvatarClicked})}>
|
||||
{"Clear"}
|
||||
</button>
|
||||
</div>
|
||||
<div class="col-4">
|
||||
{
|
||||
if !avatar_string.is_empty() {
|
||||
html!{
|
||||
<img
|
||||
id="avatarDisplay"
|
||||
src={format!("data:image/jpeg;base64, {}", avatar_string)}
|
||||
style="max-height:128px;max-width:128px;height:auto;width:auto;"
|
||||
alt="Avatar" />
|
||||
}
|
||||
} else { html! {} }
|
||||
}
|
||||
<img
|
||||
id="avatarDisplay"
|
||||
src={format!("data:image/jpeg;base64, {}", avatar_string)}
|
||||
style="max-height:128px;max-width:128px;height:auto;width:auto;"
|
||||
alt="Avatar" />
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@@ -373,10 +345,10 @@ impl UserDetailsForm {
|
||||
if !self.form.validate() {
|
||||
bail!("Invalid inputs");
|
||||
}
|
||||
if let Some(JsFile {
|
||||
if let JsFile {
|
||||
file: Some(_),
|
||||
contents: None,
|
||||
}) = &self.avatar
|
||||
} = &self.avatar
|
||||
{
|
||||
bail!("Image file hasn't finished loading, try again");
|
||||
}
|
||||
@@ -388,8 +360,6 @@ impl UserDetailsForm {
|
||||
firstName: None,
|
||||
lastName: None,
|
||||
avatar: None,
|
||||
removeAttributes: None,
|
||||
insertAttributes: None,
|
||||
};
|
||||
let default_user_input = user_input.clone();
|
||||
let model = self.form.model();
|
||||
@@ -406,9 +376,7 @@ impl UserDetailsForm {
|
||||
if base_user.last_name != model.last_name {
|
||||
user_input.lastName = Some(model.last_name);
|
||||
}
|
||||
if let Some(avatar) = &self.avatar {
|
||||
user_input.avatar = Some(to_base64(avatar)?);
|
||||
}
|
||||
user_input.avatar = maybe_to_base64(&self.avatar)?;
|
||||
// Nothing changed.
|
||||
if user_input == default_user_input {
|
||||
return Ok(false);
|
||||
@@ -430,8 +398,8 @@ impl UserDetailsForm {
|
||||
self.user.display_name = model.display_name;
|
||||
self.user.first_name = model.first_name;
|
||||
self.user.last_name = model.last_name;
|
||||
if let Some(avatar) = &self.avatar {
|
||||
self.user.avatar = Some(to_base64(avatar)?);
|
||||
if let Some(avatar) = maybe_to_base64(&self.avatar)? {
|
||||
self.user.avatar = Some(avatar);
|
||||
}
|
||||
self.just_updated = true;
|
||||
Ok(true)
|
||||
@@ -456,12 +424,12 @@ fn is_valid_jpeg(bytes: &[u8]) -> bool {
|
||||
.is_ok()
|
||||
}
|
||||
|
||||
fn to_base64(file: &JsFile) -> Result<String> {
|
||||
fn maybe_to_base64(file: &JsFile) -> Result<Option<String>> {
|
||||
match file {
|
||||
JsFile {
|
||||
file: None,
|
||||
contents: _,
|
||||
} => Ok(String::new()),
|
||||
} => Ok(None),
|
||||
JsFile {
|
||||
file: Some(_),
|
||||
contents: None,
|
||||
@@ -473,7 +441,7 @@ fn to_base64(file: &JsFile) -> Result<String> {
|
||||
if !is_valid_jpeg(data.as_slice()) {
|
||||
bail!("Chosen image is not a valid JPEG");
|
||||
}
|
||||
Ok(base64::encode(data))
|
||||
Ok(Some(base64::encode(data)))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -18,10 +18,6 @@ fn get_claims_from_jwt(jwt: &str) -> Result<JWTClaims> {
|
||||
|
||||
const NO_BODY: Option<()> = None;
|
||||
|
||||
fn base_url() -> String {
|
||||
yew_router::utils::base_url().unwrap_or_default()
|
||||
}
|
||||
|
||||
async fn call_server(
|
||||
url: &str,
|
||||
body: Option<impl Serialize>,
|
||||
@@ -101,7 +97,7 @@ impl HostService {
|
||||
};
|
||||
let request_body = QueryType::build_query(variables);
|
||||
call_server_json_with_error_message::<graphql_client::Response<_>, _>(
|
||||
&(base_url() + "/api/graphql"),
|
||||
"/api/graphql",
|
||||
Some(request_body),
|
||||
error_message,
|
||||
)
|
||||
@@ -113,7 +109,7 @@ impl HostService {
|
||||
request: login::ClientLoginStartRequest,
|
||||
) -> Result<Box<login::ServerLoginStartResponse>> {
|
||||
call_server_json_with_error_message(
|
||||
&(base_url() + "/auth/opaque/login/start"),
|
||||
"/auth/opaque/login/start",
|
||||
Some(request),
|
||||
"Could not start authentication: ",
|
||||
)
|
||||
@@ -122,7 +118,7 @@ impl HostService {
|
||||
|
||||
pub async fn login_finish(request: login::ClientLoginFinishRequest) -> Result<(String, bool)> {
|
||||
call_server_json_with_error_message::<login::ServerLoginResponse, _>(
|
||||
&(base_url() + "/auth/opaque/login/finish"),
|
||||
"/auth/opaque/login/finish",
|
||||
Some(request),
|
||||
"Could not finish authentication",
|
||||
)
|
||||
@@ -134,7 +130,7 @@ impl HostService {
|
||||
request: registration::ClientRegistrationStartRequest,
|
||||
) -> Result<Box<registration::ServerRegistrationStartResponse>> {
|
||||
call_server_json_with_error_message(
|
||||
&(base_url() + "/auth/opaque/register/start"),
|
||||
"/auth/opaque/register/start",
|
||||
Some(request),
|
||||
"Could not start registration: ",
|
||||
)
|
||||
@@ -145,7 +141,7 @@ impl HostService {
|
||||
request: registration::ClientRegistrationFinishRequest,
|
||||
) -> Result<()> {
|
||||
call_server_empty_response_with_error_message(
|
||||
&(base_url() + "/auth/opaque/register/finish"),
|
||||
"/auth/opaque/register/finish",
|
||||
Some(request),
|
||||
"Could not finish registration",
|
||||
)
|
||||
@@ -154,7 +150,7 @@ impl HostService {
|
||||
|
||||
pub async fn refresh() -> Result<(String, bool)> {
|
||||
call_server_json_with_error_message::<login::ServerLoginResponse, _>(
|
||||
&(base_url() + "/auth/refresh"),
|
||||
"/auth/refresh",
|
||||
NO_BODY,
|
||||
"Could not start authentication: ",
|
||||
)
|
||||
@@ -164,21 +160,13 @@ impl HostService {
|
||||
|
||||
// The `_request` parameter is to make it the same shape as the other functions.
|
||||
pub async fn logout() -> Result<()> {
|
||||
call_server_empty_response_with_error_message(
|
||||
&(base_url() + "/auth/logout"),
|
||||
NO_BODY,
|
||||
"Could not logout",
|
||||
)
|
||||
.await
|
||||
call_server_empty_response_with_error_message("/auth/logout", NO_BODY, "Could not logout")
|
||||
.await
|
||||
}
|
||||
|
||||
pub async fn reset_password_step1(username: String) -> Result<()> {
|
||||
call_server_empty_response_with_error_message(
|
||||
&format!(
|
||||
"{}/auth/reset/step1/{}",
|
||||
base_url(),
|
||||
url_escape::encode_query(&username)
|
||||
),
|
||||
&format!("/auth/reset/step1/{}", url_escape::encode_query(&username)),
|
||||
NO_BODY,
|
||||
"Could not initiate password reset",
|
||||
)
|
||||
@@ -189,7 +177,7 @@ impl HostService {
|
||||
token: String,
|
||||
) -> Result<lldap_auth::password_reset::ServerPasswordResetResponse> {
|
||||
call_server_json_with_error_message(
|
||||
&format!("{}/auth/reset/step2/{}", base_url(), token),
|
||||
&format!("/auth/reset/step2/{}", token),
|
||||
NO_BODY,
|
||||
"Could not validate token",
|
||||
)
|
||||
@@ -197,13 +185,13 @@ impl HostService {
|
||||
}
|
||||
|
||||
pub async fn probe_password_reset() -> Result<bool> {
|
||||
Ok(gloo_net::http::Request::get(
|
||||
&(base_url() + "/auth/reset/step1/lldap_unlikely_very_long_user_name"),
|
||||
Ok(
|
||||
gloo_net::http::Request::get("/auth/reset/step1/lldap_unlikely_very_long_user_name")
|
||||
.header("Content-Type", "application/json")
|
||||
.send()
|
||||
.await?
|
||||
.status()
|
||||
!= http::StatusCode::NOT_FOUND,
|
||||
)
|
||||
.header("Content-Type", "application/json")
|
||||
.send()
|
||||
.await?
|
||||
.status()
|
||||
!= http::StatusCode::NOT_FOUND)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -22,11 +22,10 @@ pub fn set_cookie(cookie_name: &str, value: &str, expiration: &DateTime<Utc>) ->
|
||||
.map_err(|_| anyhow!("Document is not an HTMLDocument"))
|
||||
})?;
|
||||
let cookie_string = format!(
|
||||
"{}={}; expires={}; sameSite=Strict; path={}/",
|
||||
"{}={}; expires={}; sameSite=Strict; path=/",
|
||||
cookie_name,
|
||||
value,
|
||||
expiration.to_rfc2822(),
|
||||
yew_router::utils::base_url().unwrap_or_default()
|
||||
expiration.to_rfc2822()
|
||||
);
|
||||
doc.set_cookie(&cookie_string)
|
||||
.map_err(|_| anyhow!("Could not set cookie"))
|
||||
|
||||
@@ -6,14 +6,13 @@ homepage = "https://github.com/lldap/lldap"
|
||||
license = "GPL-3.0-only"
|
||||
name = "lldap_auth"
|
||||
repository = "https://github.com/lldap/lldap"
|
||||
version = "0.4.0"
|
||||
version = "0.3.0"
|
||||
|
||||
[features]
|
||||
default = ["opaque_server", "opaque_client"]
|
||||
opaque_server = []
|
||||
opaque_client = []
|
||||
js = []
|
||||
sea_orm = ["dep:sea-orm"]
|
||||
|
||||
[dependencies]
|
||||
rust-argon2 = "0.8"
|
||||
@@ -21,27 +20,21 @@ curve25519-dalek = "3"
|
||||
digest = "0.9"
|
||||
generic-array = "0.14"
|
||||
rand = "0.8"
|
||||
serde = "*"
|
||||
serde = "1"
|
||||
sha2 = "0.9"
|
||||
thiserror = "*"
|
||||
thiserror = "1"
|
||||
|
||||
[dependencies.opaque-ke]
|
||||
version = "0.6"
|
||||
|
||||
[dependencies.chrono]
|
||||
version = "*"
|
||||
version = "0.4"
|
||||
features = [ "serde" ]
|
||||
|
||||
[dependencies.sea-orm]
|
||||
version= "0.12"
|
||||
default-features = false
|
||||
features = ["macros"]
|
||||
optional = true
|
||||
|
||||
# For WASM targets, use the JS getrandom.
|
||||
[target.'cfg(not(target_arch = "wasm32"))'.dependencies.getrandom]
|
||||
version = "0.2"
|
||||
features = ["js"]
|
||||
|
||||
[target.'cfg(target_arch = "wasm32")'.dependencies.getrandom]
|
||||
version = "0.2"
|
||||
features = ["js"]
|
||||
|
||||
110
auth/src/lib.rs
110
auth/src/lib.rs
@@ -9,17 +9,17 @@ pub mod opaque;
|
||||
|
||||
/// The messages for the 3-step OPAQUE and simple login process.
|
||||
pub mod login {
|
||||
use super::{types::UserId, *};
|
||||
use super::*;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct ServerData {
|
||||
pub username: UserId,
|
||||
pub username: String,
|
||||
pub server_login: opaque::server::login::ServerLogin,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct ClientLoginStartRequest {
|
||||
pub username: UserId,
|
||||
pub username: String,
|
||||
pub login_start_request: opaque::server::login::CredentialRequest,
|
||||
}
|
||||
|
||||
@@ -39,14 +39,14 @@ pub mod login {
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct ClientSimpleLoginRequest {
|
||||
pub username: UserId,
|
||||
pub username: String,
|
||||
pub password: String,
|
||||
}
|
||||
|
||||
impl fmt::Debug for ClientSimpleLoginRequest {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
f.debug_struct("ClientSimpleLoginRequest")
|
||||
.field("username", &self.username.as_str())
|
||||
.field("username", &self.username)
|
||||
.field("password", &"***********")
|
||||
.finish()
|
||||
}
|
||||
@@ -63,16 +63,16 @@ pub mod login {
|
||||
/// The messages for the 3-step OPAQUE registration process.
|
||||
/// It is used to reset a user's password.
|
||||
pub mod registration {
|
||||
use super::{types::UserId, *};
|
||||
use super::*;
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct ServerData {
|
||||
pub username: UserId,
|
||||
pub username: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Clone)]
|
||||
pub struct ClientRegistrationStartRequest {
|
||||
pub username: UserId,
|
||||
pub username: String,
|
||||
pub registration_start_request: opaque::server::registration::RegistrationRequest,
|
||||
}
|
||||
|
||||
@@ -104,100 +104,6 @@ pub mod password_reset {
|
||||
}
|
||||
}
|
||||
|
||||
pub mod types {
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[cfg(feature = "sea_orm")]
|
||||
use sea_orm::{DbErr, DeriveValueType, QueryResult, TryFromU64, Value};
|
||||
|
||||
#[derive(
|
||||
PartialEq, Eq, PartialOrd, Ord, Clone, Debug, Default, Hash, Serialize, Deserialize,
|
||||
)]
|
||||
#[cfg_attr(feature = "sea_orm", derive(DeriveValueType))]
|
||||
#[serde(from = "String")]
|
||||
pub struct CaseInsensitiveString(String);
|
||||
|
||||
impl CaseInsensitiveString {
|
||||
pub fn new(s: &str) -> Self {
|
||||
Self(s.to_ascii_lowercase())
|
||||
}
|
||||
|
||||
pub fn as_str(&self) -> &str {
|
||||
self.0.as_str()
|
||||
}
|
||||
|
||||
pub fn into_string(self) -> String {
|
||||
self.0
|
||||
}
|
||||
}
|
||||
|
||||
impl From<String> for CaseInsensitiveString {
|
||||
fn from(mut s: String) -> Self {
|
||||
s.make_ascii_lowercase();
|
||||
Self(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&String> for CaseInsensitiveString {
|
||||
fn from(s: &String) -> Self {
|
||||
Self::new(s.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
impl From<&str> for CaseInsensitiveString {
|
||||
fn from(s: &str) -> Self {
|
||||
Self::new(s)
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(
|
||||
PartialEq, Eq, PartialOrd, Ord, Clone, Debug, Default, Hash, Serialize, Deserialize,
|
||||
)]
|
||||
#[cfg_attr(feature = "sea_orm", derive(DeriveValueType))]
|
||||
#[serde(from = "CaseInsensitiveString")]
|
||||
pub struct UserId(CaseInsensitiveString);
|
||||
|
||||
impl UserId {
|
||||
pub fn new(s: &str) -> Self {
|
||||
s.into()
|
||||
}
|
||||
pub fn as_str(&self) -> &str {
|
||||
self.0.as_str()
|
||||
}
|
||||
pub fn into_string(self) -> String {
|
||||
self.0.into_string()
|
||||
}
|
||||
}
|
||||
impl<T> From<T> for UserId
|
||||
where
|
||||
T: Into<CaseInsensitiveString>,
|
||||
{
|
||||
fn from(s: T) -> Self {
|
||||
Self(s.into())
|
||||
}
|
||||
}
|
||||
impl std::fmt::Display for UserId {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
write!(f, "{}", self.0.as_str())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(feature = "sea_orm")]
|
||||
impl From<&UserId> for Value {
|
||||
fn from(user_id: &UserId) -> Self {
|
||||
user_id.as_str().into()
|
||||
}
|
||||
}
|
||||
#[cfg(feature = "sea_orm")]
|
||||
impl TryFromU64 for UserId {
|
||||
fn try_from_u64(_n: u64) -> Result<Self, DbErr> {
|
||||
Err(DbErr::ConvertFromU64(
|
||||
"UserId cannot be constructed from u64",
|
||||
))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct JWTClaims {
|
||||
pub exp: DateTime<Utc>,
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use crate::types::UserId;
|
||||
use opaque_ke::ciphersuite::CipherSuite;
|
||||
use rand::{CryptoRng, RngCore};
|
||||
|
||||
@@ -78,10 +77,10 @@ pub mod client {
|
||||
pub use opaque_ke::ClientRegistrationFinishParameters;
|
||||
/// Initiate the registration negotiation.
|
||||
pub fn start_registration<R: RngCore + CryptoRng>(
|
||||
password: &[u8],
|
||||
password: &str,
|
||||
rng: &mut R,
|
||||
) -> AuthenticationResult<ClientRegistrationStartResult> {
|
||||
Ok(ClientRegistration::start(rng, password)?)
|
||||
Ok(ClientRegistration::start(rng, password.as_bytes())?)
|
||||
}
|
||||
|
||||
/// Finalize the registration negotiation.
|
||||
@@ -146,12 +145,12 @@ pub mod server {
|
||||
pub fn start_registration(
|
||||
server_setup: &ServerSetup,
|
||||
registration_request: RegistrationRequest,
|
||||
username: &UserId,
|
||||
username: &str,
|
||||
) -> AuthenticationResult<ServerRegistrationStartResult> {
|
||||
Ok(ServerRegistration::start(
|
||||
server_setup,
|
||||
registration_request,
|
||||
username.as_str().as_bytes(),
|
||||
username.as_bytes(),
|
||||
)?)
|
||||
}
|
||||
|
||||
@@ -179,14 +178,14 @@ pub mod server {
|
||||
server_setup: &ServerSetup,
|
||||
password_file: Option<ServerRegistration>,
|
||||
credential_request: CredentialRequest,
|
||||
username: &UserId,
|
||||
username: &str,
|
||||
) -> AuthenticationResult<ServerLoginStartResult> {
|
||||
Ok(ServerLogin::start(
|
||||
rng,
|
||||
server_setup,
|
||||
password_file,
|
||||
credential_request,
|
||||
username.as_str().as_bytes(),
|
||||
username.as_bytes(),
|
||||
ServerLoginStartParameters::default(),
|
||||
)?)
|
||||
}
|
||||
|
||||
@@ -1,20 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
CONFIG_FILE=/data/lldap_config.toml
|
||||
|
||||
if [ ! -f "$CONFIG_FILE" ]; then
|
||||
echo "[entrypoint] Copying the default config to $CONFIG_FILE"
|
||||
echo "[entrypoint] Edit this $CONFIG_FILE to configure LLDAP."
|
||||
if cp /app/lldap_config.docker_template.toml $CONFIG_FILE; then
|
||||
echo "Configuration copied successfully."
|
||||
else
|
||||
echo "Fail to copy configuration, check permission on /data or manually create one by copying from LLDAP repository"
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
echo "> Starting lldap.."
|
||||
echo ""
|
||||
exec /app/lldap "$@"
|
||||
exec "$@"
|
||||
@@ -20,7 +20,7 @@ LLDAP has a command that will connect to a target database and initialize the
|
||||
schema. If running with docker, run the following command to use your active
|
||||
instance (this has the benefit of ensuring your container has access):
|
||||
|
||||
```sh
|
||||
```
|
||||
docker exec -it <LLDAP container name> /app/lldap create_schema -d <Target database url>
|
||||
```
|
||||
|
||||
@@ -34,7 +34,7 @@ databases (SQLite in this example) will give an error if LLDAP is in the middle
|
||||
statements. There are various ways to do this, but a simple enough way is filtering a
|
||||
whole database dump. This repo contains [a script](/scripts/sqlite_dump_commands.sh) to generate SQLite commands for creating an appropriate dump:
|
||||
|
||||
```sh
|
||||
```
|
||||
./sqlite_dump_commands.sh | sqlite3 /path/to/lldap/config/users.db > /path/to/dump.sql
|
||||
```
|
||||
|
||||
@@ -49,22 +49,20 @@ a transaction in case one of the statements fail.
|
||||
PostgreSQL uses a different hex string format. The command below should switch SQLite
|
||||
format to PostgreSQL format, and wrap it all in a transaction:
|
||||
|
||||
```sh
|
||||
```
|
||||
sed -i -r -e "s/X'([[:xdigit:]]+'[^'])/'\\\x\\1/g" \
|
||||
-e ":a; s/(INSERT INTO (user_attribute_schema|jwt_storage)\(.*\) VALUES\(.*),1([^']*\);)$/\1,true\3/; s/(INSERT INTO (user_attribute_schema|jwt_storage)\(.*\) VALUES\(.*),0([^']*\);)$/\1,false\3/; ta" \
|
||||
-e '1s/^/BEGIN;\n/' \
|
||||
-e '$aSELECT setval(pg_get_serial_sequence('\''groups'\'', '\''group_id'\''), COALESCE((SELECT MAX(group_id) FROM groups), 1));' \
|
||||
-e '$aCOMMIT;' /path/to/dump.sql
|
||||
```
|
||||
|
||||
### To MySQL
|
||||
|
||||
MySQL mostly cooperates, but it gets some errors if you don't escape the `groups` table. It also uses
|
||||
backticks to escape table name instead of quotes. Run the
|
||||
backticks to escape table name instead of quotes. Run the
|
||||
following command to wrap all table names in backticks for good measure, and wrap the inserts in
|
||||
a transaction:
|
||||
|
||||
```sh
|
||||
```
|
||||
sed -i -r -e 's/^INSERT INTO "?([a-zA-Z0-9_]+)"?/INSERT INTO `\1`/' \
|
||||
-e '1s/^/START TRANSACTION;\n/' \
|
||||
-e '$aCOMMIT;' \
|
||||
@@ -76,7 +74,7 @@ sed -i -r -e 's/^INSERT INTO "?([a-zA-Z0-9_]+)"?/INSERT INTO `\1`/' \
|
||||
While MariaDB is supposed to be identical to MySQL, it doesn't support timezone offsets on DATETIME
|
||||
strings. Use the following command to remove those and perform the additional MySQL sanitization:
|
||||
|
||||
```sh
|
||||
```
|
||||
sed -i -r -e "s/([^']'[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]{9})\+00:00'([^'])/\1'\2/g" \
|
||||
-e 's/^INSERT INTO "?([a-zA-Z0-9_]+)"?/INSERT INTO `\1`/' \
|
||||
-e '1s/^/START TRANSACTION;\n/' \
|
||||
@@ -108,4 +106,4 @@ Modify your `database_url` in `lldap_config.toml` (or `LLDAP_DATABASE_URL` in th
|
||||
to point to your new database (the same value used when generating schema). Restart
|
||||
LLDAP and check the logs to ensure there were no errors.
|
||||
|
||||
#### More details/examples can be seen in the CI process [here](https://raw.githubusercontent.com/lldap/lldap/main/.github/workflows/docker-build-static.yml), look for the job `lldap-database-migration-test`
|
||||
#### More details/examples can be seen in the CI process [here](https://raw.githubusercontent.com/nitnelave/lldap/main/.github/workflows/docker-build-static.yml), look for the job `lldap-database-migration-test`
|
||||
|
||||
@@ -18,15 +18,6 @@ still supports basic RootDSE queries.
|
||||
|
||||
Anonymous bind is not supported.
|
||||
|
||||
## `lldap-cli`
|
||||
|
||||
There is a community-built CLI frontend,
|
||||
[Zepmann/lldap-cli](https://github.com/Zepmann/lldap-cli), that supports all
|
||||
(as of this writing) the operations possible. Getting information from the
|
||||
server, creating users, adding them to groups, creating new custom attributes
|
||||
and populating them, all of that is supported. It is currently the easiest way
|
||||
to script the interaction with LLDAP.
|
||||
|
||||
## GraphQL
|
||||
|
||||
The best way to interact with LLDAP programmatically is via the GraphQL
|
||||
|
||||
@@ -1,18 +0,0 @@
|
||||
# Configuration for Apereo CAS Server
|
||||
|
||||
Replace `dc=example,dc=com` with your LLDAP configured domain, and hostname for your LLDAP server.
|
||||
|
||||
The `search-filter` provided here requires users to be members of the `cas_auth` group in LLDAP.
|
||||
|
||||
Configuration to use LDAP in e.g. `/etc/cas/config/standalone.yml`
|
||||
```
|
||||
cas:
|
||||
authn:
|
||||
ldap:
|
||||
- base-dn: dc=example,dc=com
|
||||
bind-credential: password
|
||||
bind-dn: uid=admin,ou=people,dc=example,dc=com
|
||||
ldap-url: ldap://ldap.example.com:3890
|
||||
search-filter: (&(objectClass=person)(memberOf=uid=cas_auth,ou=groups,dc=example,dc=com))
|
||||
```
|
||||
|
||||
@@ -33,7 +33,7 @@ authentication_backend:
|
||||
users_filter: "(&({username_attribute}={input})(objectClass=person))"
|
||||
# Set this to ou=groups, because all groups are stored in this ou
|
||||
additional_groups_dn: ou=groups
|
||||
# The groups are not displayed in the UI, but this filter works.
|
||||
# Only this filter is supported right now
|
||||
groups_filter: "(member={dn})"
|
||||
# The attribute holding the name of the group.
|
||||
group_name_attribute: cn
|
||||
|
||||
Binary file not shown.
|
Before Width: | Height: | Size: 51 KiB |
@@ -1,254 +0,0 @@
|
||||
# Bootstrapping lldap using [bootstrap.sh](bootstrap.sh) script
|
||||
|
||||
bootstrap.sh allows managing your lldap in a git-ops, declarative way using JSON config files.
|
||||
|
||||
The script can:
|
||||
|
||||
* create, update users
|
||||
* set/update all lldap built-in user attributes
|
||||
* add/remove users to/from corresponding groups
|
||||
* set/update user avatar from file, link or from gravatar by user email
|
||||
* set/update user password
|
||||
* create groups
|
||||
* delete redundant users and groups (when `DO_CLEANUP` env var is true)
|
||||
* maintain the desired state described in JSON config files
|
||||
|
||||
|
||||

|
||||
|
||||
## Required packages
|
||||
|
||||
> The script will automatically install the required packages for alpine and debian-based distributions
|
||||
> when run by root, or you can install them by yourself.
|
||||
|
||||
- curl
|
||||
- [jq](https://github.com/jqlang/jq)
|
||||
- [jo](https://github.com/jpmens/jo)
|
||||
|
||||
## Environment variables
|
||||
|
||||
- `LLDAP_URL` or `LLDAP_URL_FILE` - URL to your lldap instance or path to file that contains URL (**MANDATORY**)
|
||||
- `LLDAP_ADMIN_USERNAME` or `LLDAP_ADMIN_USERNAME_FILE` - admin username or path to file that contains username (**MANDATORY**)
|
||||
- `LLDAP_ADMIN_PASSWORD` or `LLDAP_ADMIN_PASSWORD_FILE` - admin password or path to file that contains password (**MANDATORY**)
|
||||
- `USER_CONFIGS_DIR` (default value: `/user-configs`) - directory where the user JSON configs could be found
|
||||
- `GROUP_CONFIGS_DIR` (default value: `/group-configs`) - directory where the group JSON configs could be found
|
||||
- `LLDAP_SET_PASSWORD_PATH` - path to the `lldap_set_password` utility (default value: `/app/lldap_set_password`)
|
||||
- `DO_CLEANUP` (default value: `false`) - delete groups and users not specified in config files, also remove users from groups that they do not belong to
|
||||
|
||||
## Config files
|
||||
|
||||
There are two types of config files: [group](#group-config-file-example) and [user](#user-config-file-example) configs.
|
||||
Each config file can be as one JSON file with nested JSON top-level values as several JSON files.
|
||||
|
||||
### Group config file example
|
||||
|
||||
Group configs are used to define groups that will be created by the script
|
||||
|
||||
Fields description:
|
||||
|
||||
* `name`: name of the group (**MANDATORY**)
|
||||
|
||||
```json
|
||||
{
|
||||
"name": "group-1"
|
||||
}
|
||||
{
|
||||
"name": "group-2"
|
||||
}
|
||||
```
|
||||
|
||||
### User config file example
|
||||
|
||||
User config defines all the lldap user structures,
|
||||
if the non-mandatory field is omitted, the script will clean this field in lldap as well.
|
||||
|
||||
Fields description:
|
||||
|
||||
* `id`: it's just username (**MANDATORY**)
|
||||
* `email`: self-explanatory (**MANDATORY**)
|
||||
* `password`: would be used to set the password using `lldap_set_password` utility
|
||||
* `displayName`: self-explanatory
|
||||
* `firstName`: self-explanatory
|
||||
* `lastName`: self-explanatory
|
||||
* `avatar_file`: must be a valid path to jpeg file (ignored if `avatar_url` specified)
|
||||
* `avatar_url`: must be a valid URL to jpeg file (ignored if `gravatar_avatar` specified)
|
||||
* `gravatar_avatar` (`false` by default): the script will try to get an avatar from [gravatar](https://gravatar.com/) by previously specified `email` (has the highest priority)
|
||||
* `weserv_avatar` (`false` by default): avatar file from `avatar_url` or `gravatar_avatar` would be converted to jpeg using [wsrv.nl](https://wsrv.nl) (useful when your avatar is png)
|
||||
* `groups`: an array of groups the user would be a member of (all the groups must be specified in group config files)
|
||||
|
||||
```json
|
||||
{
|
||||
"id": "username",
|
||||
"email": "username@example.com",
|
||||
"password": "changeme",
|
||||
"displayName": "Display Name",
|
||||
"firstName": "First",
|
||||
"lastName": "Last",
|
||||
"avatar_file": "/path/to/avatar.jpg",
|
||||
"avatar_url": "https://i.imgur.com/nbCxk3z.jpg",
|
||||
"gravatar_avatar": "false",
|
||||
"weserv_avatar": "false",
|
||||
"groups": [
|
||||
"group-1",
|
||||
"group-2"
|
||||
]
|
||||
}
|
||||
|
||||
```
|
||||
|
||||
## Usage example
|
||||
|
||||
### Manually
|
||||
|
||||
The script can be run manually in the terminal for initial bootstrapping of your lldap instance.
|
||||
You should make sure that the [required packages](#required-packages) are installed
|
||||
and the [environment variables](#environment-variables) are configured properly.
|
||||
|
||||
```bash
|
||||
export LLDAP_URL=http://localhost:8080
|
||||
export LLDAP_ADMIN_USERNAME=admin
|
||||
export LLDAP_ADMIN_PASSWORD=changeme
|
||||
export USER_CONFIGS_DIR="$(realpath ./configs/user)"
|
||||
export GROUP_CONFIGS_DIR="$(realpath ./configs/group)"
|
||||
export LLDAP_SET_PASSWORD_PATH="$(realpath ./lldap_set_password)"
|
||||
export DO_CLEANUP=false
|
||||
./bootstrap.sh
|
||||
```
|
||||
|
||||
### Docker compose
|
||||
|
||||
Let's suppose you have the next file structure:
|
||||
|
||||
```text
|
||||
./
|
||||
├─ docker-compose.yaml
|
||||
└─ bootstrap
|
||||
├─ bootstrap.sh
|
||||
└─ user-configs
|
||||
│ ├─ user-1.json
|
||||
│ ├─ ...
|
||||
│ └─ user-n.json
|
||||
└─ group-configs
|
||||
├─ group-1.json
|
||||
├─ ...
|
||||
└─ group-n.json
|
||||
|
||||
```
|
||||
|
||||
You should mount `bootstrap` dir to lldap container and set the corresponding `env` variables:
|
||||
|
||||
```yaml
|
||||
version: "3"
|
||||
|
||||
services:
|
||||
lldap:
|
||||
image: lldap/lldap:v0.5.0
|
||||
volumes:
|
||||
- ./bootstrap:/bootstrap
|
||||
ports:
|
||||
- "3890:3890" # For LDAP
|
||||
- "17170:17170" # For the web front-end
|
||||
environment:
|
||||
# envs required for lldap
|
||||
- LLDAP_LDAP_USER_EMAIL=admin@example.com
|
||||
- LLDAP_LDAP_USER_PASS=changeme
|
||||
- LLDAP_LDAP_BASE_DN=dc=example,dc=com
|
||||
|
||||
# envs required for bootstrap.sh
|
||||
- LLDAP_URL=http://localhost:17170
|
||||
- LLDAP_ADMIN_USERNAME=admin
|
||||
- LLDAP_ADMIN_PASSWORD=changeme # same as LLDAP_LDAP_USER_PASS
|
||||
- USER_CONFIGS_DIR=/bootstrap/user-configs
|
||||
- GROUP_CONFIGS_DIR=/bootstrap/group-configs
|
||||
- DO_CLEANUP=false
|
||||
```
|
||||
|
||||
Then, to bootstrap your lldap just run `docker compose exec lldap /bootstrap/bootstrap.sh`.
|
||||
If config files were changed, re-run the `bootstrap.sh` with the same command.
|
||||
|
||||
### Kubernetes job
|
||||
|
||||
```yaml
|
||||
apiVersion: batch/v1
|
||||
kind: Job
|
||||
metadata:
|
||||
name: lldap-bootstrap
|
||||
# Next annotations are required if the job managed by Argo CD,
|
||||
# so Argo CD can relaunch the job on every app sync action
|
||||
annotations:
|
||||
argocd.argoproj.io/hook: PostSync
|
||||
argocd.argoproj.io/hook-delete-policy: BeforeHookCreation
|
||||
spec:
|
||||
template:
|
||||
spec:
|
||||
restartPolicy: OnFailure
|
||||
containers:
|
||||
- name: lldap-bootstrap
|
||||
image: lldap/lldap:v0.5.0
|
||||
|
||||
command:
|
||||
- /bootstrap/bootstrap.sh
|
||||
|
||||
env:
|
||||
- name: LLDAP_URL
|
||||
value: "http://lldap:8080"
|
||||
|
||||
- name: LLDAP_ADMIN_USERNAME
|
||||
valueFrom: { secretKeyRef: { name: lldap-admin-user, key: username } }
|
||||
|
||||
- name: LLDAP_ADMIN_PASSWORD
|
||||
valueFrom: { secretKeyRef: { name: lldap-admin-user, key: password } }
|
||||
|
||||
- name: DO_CLEANUP
|
||||
value: "true"
|
||||
|
||||
volumeMounts:
|
||||
- name: bootstrap
|
||||
mountPath: /bootstrap/bootstrap.sh
|
||||
subPath: bootstrap.sh
|
||||
|
||||
- name: user-configs
|
||||
mountPath: /user-configs
|
||||
readOnly: true
|
||||
|
||||
- name: group-configs
|
||||
mountPath: /group-configs
|
||||
readOnly: true
|
||||
|
||||
volumes:
|
||||
- name: bootstrap
|
||||
configMap:
|
||||
name: bootstrap
|
||||
defaultMode: 0555
|
||||
items:
|
||||
- key: bootstrap.sh
|
||||
path: bootstrap.sh
|
||||
|
||||
- name: user-configs
|
||||
projected:
|
||||
sources:
|
||||
- secret:
|
||||
name: lldap-admin-user
|
||||
items:
|
||||
- key: user-config.json
|
||||
path: admin-config.json
|
||||
- secret:
|
||||
name: lldap-password-manager-user
|
||||
items:
|
||||
- key: user-config.json
|
||||
path: password-manager-config.json
|
||||
- secret:
|
||||
name: lldap-bootstrap-configs
|
||||
items:
|
||||
- key: user-configs.json
|
||||
path: user-configs.json
|
||||
|
||||
- name: group-configs
|
||||
projected:
|
||||
sources:
|
||||
- secret:
|
||||
name: lldap-bootstrap-configs
|
||||
items:
|
||||
- key: group-configs.json
|
||||
path: group-configs.json
|
||||
```
|
||||
@@ -1,490 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
|
||||
set -e
|
||||
set -o pipefail
|
||||
|
||||
LLDAP_URL="${LLDAP_URL}"
|
||||
LLDAP_ADMIN_USERNAME="${LLDAP_ADMIN_USERNAME}"
|
||||
LLDAP_ADMIN_PASSWORD="${LLDAP_ADMIN_PASSWORD}"
|
||||
USER_CONFIGS_DIR="${USER_CONFIGS_DIR:-/user-configs}"
|
||||
GROUP_CONFIGS_DIR="${GROUP_CONFIGS_DIR:-/group-configs}"
|
||||
LLDAP_SET_PASSWORD_PATH="${LLDAP_SET_PASSWORD_PATH:-/app/lldap_set_password}"
|
||||
DO_CLEANUP="${DO_CLEANUP:-false}"
|
||||
|
||||
check_install_dependencies() {
|
||||
local commands=('curl' 'jq' 'jo')
|
||||
local commands_not_found='false'
|
||||
|
||||
if ! hash "${commands[@]}" 2>/dev/null; then
|
||||
if hash 'apk' 2>/dev/null && [[ $EUID -eq 0 ]]; then
|
||||
apk add "${commands[@]}"
|
||||
elif hash 'apt' 2>/dev/null && [[ $EUID -eq 0 ]]; then
|
||||
apt update -yqq
|
||||
apt install -yqq "${commands[@]}"
|
||||
else
|
||||
local command=''
|
||||
for command in "${commands[@]}"; do
|
||||
if ! hash "$command" 2>/dev/null; then
|
||||
printf 'Command not found "%s"\n' "$command"
|
||||
fi
|
||||
done
|
||||
commands_not_found='true'
|
||||
fi
|
||||
fi
|
||||
|
||||
if [[ "$commands_not_found" == 'true' ]]; then
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
check_required_env_vars() {
|
||||
local env_var_not_specified='false'
|
||||
local dual_env_vars_list=(
|
||||
'LLDAP_URL'
|
||||
'LLDAP_ADMIN_USERNAME'
|
||||
'LLDAP_ADMIN_PASSWORD'
|
||||
)
|
||||
|
||||
local dual_env_var_name=''
|
||||
for dual_env_var_name in "${dual_env_vars_list[@]}"; do
|
||||
local dual_env_var_file_name="${dual_env_var_name}_FILE"
|
||||
|
||||
if [[ -z "${!dual_env_var_name}" ]] && [[ -z "${!dual_env_var_file_name}" ]]; then
|
||||
printf 'Please specify "%s" or "%s" variable!\n' "$dual_env_var_name" "$dual_env_var_file_name" >&2
|
||||
env_var_not_specified='true'
|
||||
else
|
||||
if [[ -n "${!dual_env_var_file_name}" ]]; then
|
||||
declare -g "$dual_env_var_name"="$(cat "${!dual_env_var_file_name}")"
|
||||
fi
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ "$env_var_not_specified" == 'true' ]]; then
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
check_configs_validity() {
|
||||
local config_file='' config_invalid='false'
|
||||
for config_file in "$@"; do
|
||||
local error=''
|
||||
if ! error="$(jq '.' -- "$config_file" 2>&1 >/dev/null)"; then
|
||||
printf '%s: %s\n' "$config_file" "$error"
|
||||
config_invalid='true'
|
||||
fi
|
||||
done
|
||||
|
||||
if [[ "$config_invalid" == 'true' ]]; then
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
auth() {
|
||||
local url="$1" admin_username="$2" admin_password="$3"
|
||||
|
||||
local response
|
||||
response="$(curl --silent --request POST \
|
||||
--url "$url/auth/simple/login" \
|
||||
--header 'Content-Type: application/json' \
|
||||
--data "$(jo -- username="$admin_username" password="$admin_password")")"
|
||||
|
||||
TOKEN="$(printf '%s' "$response" | jq --raw-output .token)"
|
||||
}
|
||||
|
||||
make_query() {
|
||||
local query_file="$1" variables_file="$2"
|
||||
|
||||
curl --silent --request POST \
|
||||
--url "$LLDAP_URL/api/graphql" \
|
||||
--header "Authorization: Bearer $TOKEN" \
|
||||
--header 'Content-Type: application/json' \
|
||||
--data @<(jq --slurpfile variables "$variables_file" '. + {"variables": $variables[0]}' "$query_file")
|
||||
}
|
||||
|
||||
get_group_list() {
|
||||
local query='{"query":"query GetGroupList {groups {id displayName}}","operationName":"GetGroupList"}'
|
||||
make_query <(printf '%s' "$query") <(printf '{}')
|
||||
}
|
||||
|
||||
get_group_array() {
|
||||
get_group_list | jq --raw-output '.data.groups[].displayName'
|
||||
}
|
||||
|
||||
group_exists() {
|
||||
if [[ "$(get_group_list | jq --raw-output --arg displayName "$1" '.data.groups | any(.[]; select(.displayName == $displayName))')" == 'true' ]]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
get_group_id() {
|
||||
get_group_list | jq --raw-output --arg displayName "$1" '.data.groups[] | if .displayName == $displayName then .id else empty end'
|
||||
}
|
||||
|
||||
create_group() {
|
||||
local group_name="$1"
|
||||
|
||||
if group_exists "$group_name"; then
|
||||
printf 'Group "%s" (%s) already exists\n' "$group_name" "$(get_group_id "$group_name")"
|
||||
return
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2016
|
||||
local query='{"query":"mutation CreateGroup($name: String!) {createGroup(name: $name) {id displayName}}","operationName":"CreateGroup"}'
|
||||
|
||||
local response='' error=''
|
||||
response="$(make_query <(printf '%s' "$query") <(jo -- name="$group_name"))"
|
||||
error="$(printf '%s' "$response" | jq --raw-output '.errors | if . != null then .[].message else empty end')"
|
||||
if [[ -n "$error" ]]; then
|
||||
printf '%s\n' "$error"
|
||||
else
|
||||
printf 'Group "%s" (%s) successfully created\n' "$group_name" "$(printf '%s' "$response" | jq --raw-output '.data.createGroup.id')"
|
||||
fi
|
||||
}
|
||||
|
||||
delete_group() {
|
||||
local group_name="$1" id=''
|
||||
|
||||
if ! group_exists "$group_name"; then
|
||||
printf '[WARNING] Group "%s" does not exist\n' "$group_name"
|
||||
return
|
||||
fi
|
||||
|
||||
id="$(get_group_id "$group_name")"
|
||||
|
||||
# shellcheck disable=SC2016
|
||||
local query='{"query":"mutation DeleteGroupQuery($groupId: Int!) {deleteGroup(groupId: $groupId) {ok}}","operationName":"DeleteGroupQuery"}'
|
||||
|
||||
local response='' error=''
|
||||
response="$(make_query <(printf '%s' "$query") <(jo -- groupId="$id"))"
|
||||
error="$(printf '%s' "$response" | jq --raw-output '.errors | if . != null then .[].message else empty end')"
|
||||
if [[ -n "$error" ]]; then
|
||||
printf '%s\n' "$error"
|
||||
else
|
||||
printf 'Group "%s" (%s) successfully deleted\n' "$group_name" "$id"
|
||||
fi
|
||||
}
|
||||
|
||||
get_user_details() {
|
||||
local id="$1"
|
||||
|
||||
# shellcheck disable=SC2016
|
||||
local query='{"query":"query GetUserDetails($id: String!) {user(userId: $id) {id email displayName firstName lastName creationDate uuid groups {id displayName}}}","operationName":"GetUserDetails"}'
|
||||
make_query <(printf '%s' "$query") <(jo -- id="$id")
|
||||
}
|
||||
|
||||
user_in_group() {
|
||||
local user_id="$1" group_name="$2"
|
||||
|
||||
if ! group_exists "$group_name"; then
|
||||
printf '[WARNING] Group "%s" does not exist\n' "$group_name"
|
||||
return
|
||||
fi
|
||||
|
||||
if ! user_exists "$user_id"; then
|
||||
printf 'User "%s" is not exists\n' "$user_id"
|
||||
return
|
||||
fi
|
||||
|
||||
if [[ "$(get_user_details "$user_id" | jq --raw-output --arg displayName "$group_name" '.data.user.groups | any(.[]; select(.displayName == $displayName))')" == 'true' ]]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
add_user_to_group() {
|
||||
local user_id="$1" group_name="$2" group_id=''
|
||||
|
||||
if ! group_exists "$group_name"; then
|
||||
printf '[WARNING] Group "%s" does not exist\n' "$group_name"
|
||||
return
|
||||
fi
|
||||
|
||||
group_id="$(get_group_id "$group_name")"
|
||||
|
||||
if user_in_group "$user_id" "$group_name"; then
|
||||
printf 'User "%s" already in group "%s" (%s)\n' "$user_id" "$group_name" "$group_id"
|
||||
return
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2016
|
||||
local query='{"query":"mutation AddUserToGroup($user: String!, $group: Int!) {addUserToGroup(userId: $user, groupId: $group) {ok}}","operationName":"AddUserToGroup"}'
|
||||
|
||||
local response='' error=''
|
||||
response="$(make_query <(printf '%s' "$query") <(jo -- user="$user_id" group="$group_id"))"
|
||||
error="$(printf '%s' "$response" | jq '.errors | if . != null then .[].message else empty end')"
|
||||
if [[ -n "$error" ]]; then
|
||||
printf '%s\n' "$error"
|
||||
else
|
||||
printf 'User "%s" successfully added to the group "%s" (%s)\n' "$user_id" "$group_name" "$group_id"
|
||||
fi
|
||||
}
|
||||
|
||||
remove_user_from_group() {
|
||||
local user_id="$1" group_name="$2" group_id=''
|
||||
|
||||
if ! group_exists "$group_name"; then
|
||||
printf '[WARNING] Group "%s" does not exist\n' "$group_name"
|
||||
return
|
||||
fi
|
||||
|
||||
group_id="$(get_group_id "$group_name")"
|
||||
|
||||
# shellcheck disable=SC2016
|
||||
local query='{"operationName":"RemoveUserFromGroup","query":"mutation RemoveUserFromGroup($user: String!, $group: Int!) {removeUserFromGroup(userId: $user, groupId: $group) {ok}}"}'
|
||||
|
||||
local response='' error=''
|
||||
response="$(make_query <(printf '%s' "$query") <(jo -- user="$user_id" group="$group_id"))"
|
||||
error="$(printf '%s' "$response" | jq '.errors | if . != null then .[].message else empty end')"
|
||||
if [[ -n "$error" ]]; then
|
||||
printf '%s\n' "$error"
|
||||
else
|
||||
printf 'User "%s" successfully removed from the group "%s" (%s)\n' "$user_id" "$group_name" "$group_id"
|
||||
fi
|
||||
}
|
||||
|
||||
get_users_list() {
|
||||
# shellcheck disable=SC2016
|
||||
local query='{"query": "query ListUsersQuery($filters: RequestFilter) {users(filters: $filters) {id email displayName firstName lastName creationDate}}","operationName": "ListUsersQuery"}'
|
||||
make_query <(printf '%s' "$query") <(jo -- filters=null)
|
||||
}
|
||||
|
||||
user_exists() {
|
||||
if [[ "$(get_users_list | jq --raw-output --arg id "$1" '.data.users | any(.[]; contains({"id": $id}))')" == 'true' ]]; then
|
||||
return 0
|
||||
else
|
||||
return 1
|
||||
fi
|
||||
}
|
||||
|
||||
delete_user() {
|
||||
local id="$1"
|
||||
|
||||
if ! user_exists "$id"; then
|
||||
printf 'User "%s" is not exists\n' "$id"
|
||||
return
|
||||
fi
|
||||
|
||||
# shellcheck disable=SC2016
|
||||
local query='{"query": "mutation DeleteUserQuery($user: String!) {deleteUser(userId: $user) {ok}}","operationName": "DeleteUserQuery"}'
|
||||
|
||||
local response='' error=''
|
||||
response="$(make_query <(printf '%s' "$query") <(jo -- user="$id"))"
|
||||
error="$(printf '%s' "$response" | jq --raw-output '.errors | if . != null then .[].message else empty end')"
|
||||
if [[ -n "$error" ]]; then
|
||||
printf '%s\n' "$error"
|
||||
else
|
||||
printf 'User "%s" successfully deleted\n' "$id"
|
||||
fi
|
||||
}
|
||||
|
||||
# Shared implementation for the createUser/updateUser GraphQL mutations.
# $1  - full GraphQL query JSON (the mutation to run).
# $2  - user id                 (default: null)
# $3  - email                   (default: null)
# $4  - display name            (default: null)
# $5  - first name              (default: null)
# $6  - last name               (default: null)
# $7  - path to a local avatar file           (default: null)
# $8  - URL of a remote avatar image          (default: null)
# $9  - 'true' to derive the avatar from Gravatar (default: false)
# $10 - 'true' to convert the remote avatar via wsrv.nl (default: false)
# Requires TMP_AVATAR_DIR to point at a writable cache directory when any
# remote avatar option is used.
__common_user_mutation_query() {
    local \
        query="$1" \
        id="${2:-null}" \
        email="${3:-null}" \
        displayName="${4:-null}" \
        firstName="${5:-null}" \
        lastName="${6:-null}" \
        avatar_file="${7:-null}" \
        avatar_url="${8:-null}" \
        gravatar_avatar="${9:-false}" \
        weserv_avatar="${10:-false}"

    # jo argument list: '-s' forces each value to be emitted as a JSON string.
    local variables_arr=(
        '-s' "id=$id"
        '-s' "email=$email"
        '-s' "displayName=$displayName"
        '-s' "firstName=$firstName"
        '-s' "lastName=$lastName"
    )

    local temp_avatar_file=''

    # Gravatar mode overrides any explicit URL: the address is derived from
    # the SHA-256 of the email, per Gravatar's hash-based URL scheme.
    if [[ "$gravatar_avatar" == 'true' ]]; then
        avatar_url="https://gravatar.com/avatar/$(printf '%s' "$email" | sha256sum | cut -d ' ' -f 1)?size=512"
    fi

    if [[ "$avatar_url" != 'null' ]]; then
        # Cache downloads under TMP_AVATAR_DIR, keyed by the MD5 of the URL,
        # so the same image is fetched only once per run.
        temp_avatar_file="${TMP_AVATAR_DIR}/$(printf '%s' "$avatar_url" | md5sum | cut -d ' ' -f 1)"

        if ! [[ -f "$temp_avatar_file" ]]; then
            # Optionally route through the wsrv.nl image proxy to force JPEG
            # output before downloading.
            if [[ "$weserv_avatar" == 'true' ]]; then
                avatar_url="https://wsrv.nl/?url=$avatar_url&output=jpg"
            fi
            curl --silent --location --output "$temp_avatar_file" "$avatar_url"
        fi

        # From here on the downloaded file takes precedence over $7.
        avatar_file="$temp_avatar_file"
    fi

    if [[ "$avatar_file" == 'null' ]]; then
        variables_arr+=('-s' 'avatar=null')
    else
        # jo's "%file" form inlines the file contents base64-encoded.
        variables_arr+=("avatar=%$avatar_file")
    fi

    # Nested jo: the inner call builds the user object, ":" splices it in as
    # the JSON value of the "user" variable read from a process substitution.
    make_query <(printf '%s' "$query") <(jo -- user=:<(jo -- "${variables_arr[@]}"))
}
|
||||
|
||||
# Create a new user on the server.
# $1 - user id; all arguments are forwarded to __common_user_mutation_query.
# Prints a notice and sends nothing if the id is already taken.
create_user() {
    local id="$1"

    if user_exists "$id"; then
        printf 'User "%s" already exists\n' "$id"
        return
    fi

    # shellcheck disable=SC2016
    local query='{"query":"mutation CreateUser($user: CreateUserInput!) {createUser(user: $user) {id creationDate}}","operationName":"CreateUser"}'

    local reply='' failure=''
    reply="$(__common_user_mutation_query "$query" "$@")"
    # Extract any GraphQL error messages from the response.
    failure="$(printf '%s' "$reply" | jq --raw-output '.errors | if . != null then .[].message else empty end')"
    if [[ -z "$failure" ]]; then
        printf 'User "%s" successfully created\n' "$id"
    else
        printf '%s\n' "$failure"
    fi
}
|
||||
|
||||
# Update an existing user via the GraphQL API.
# $1 - user id; all arguments are forwarded to __common_user_mutation_query.
# Prints a notice and sends nothing if the user does not exist.
update_user() {
    local id="$1"

    if ! user_exists "$id"; then
        # Fixed grammar of the original "is not exists" message.
        printf 'User "%s" does not exist\n' "$id"
        return
    fi

    # shellcheck disable=SC2016
    local query='{"query":"mutation UpdateUser($user: UpdateUserInput!) {updateUser(user: $user) {ok}}","operationName":"UpdateUser"}'

    local response='' error=''
    response="$(__common_user_mutation_query "$query" "$@")"
    # Surface GraphQL-level errors, if the server reported any.
    error="$(printf '%s' "$response" | jq --raw-output '.errors | if . != null then .[].message else empty end')"
    if [[ -n "$error" ]]; then
        printf '%s\n' "$error"
    else
        printf 'User "%s" successfully updated\n' "$id"
    fi
}
|
||||
|
||||
# Upsert helper: update the user when it already exists, create it otherwise.
# $1 - user id; the full argument list is forwarded unchanged either way.
create_update_user() {
    if user_exists "$1"; then
        update_user "$@"
    else
        create_user "$@"
    fi
}
|
||||
|
||||
# Reconcile the LLDAP server state with the declarative JSON config files:
# create/update declared groups and users, sync memberships and passwords,
# and (when DO_CLEANUP=true) delete anything on the server that is not
# declared. Without DO_CLEANUP, undeclared entities only produce warnings.
# Relies on env vars: USER_CONFIGS_DIR, GROUP_CONFIGS_DIR, LLDAP_URL,
# LLDAP_ADMIN_USERNAME, LLDAP_ADMIN_PASSWORD, LLDAP_SET_PASSWORD_PATH,
# DO_CLEANUP, and TOKEN (set by `auth` — TODO confirm; TOKEN is not set here).
main() {
    check_install_dependencies
    check_required_env_vars

    # Glob every JSON config; one file may contain one or more JSON objects.
    local user_config_files=("${USER_CONFIGS_DIR}"/*.json)
    local group_config_files=("${GROUP_CONFIGS_DIR}"/*.json)

    if ! check_configs_validity "${group_config_files[@]}" "${user_config_files[@]}"; then
        exit 1
    fi

    # Block until the LLDAP HTTP endpoint answers.
    until curl --silent -o /dev/null "$LLDAP_URL"; do
        printf 'Waiting lldap to start...\n'
        sleep 10
    done

    auth "$LLDAP_URL" "$LLDAP_ADMIN_USERNAME" "$LLDAP_ADMIN_PASSWORD"

    # Start from all server-side groups minus LLDAP's built-in groups; each
    # declared group is removed from this list, leaving the redundant ones.
    local redundant_groups=''
    redundant_groups="$(get_group_list | jq '[ .data.groups[].displayName ]' | jq --compact-output '. - ["lldap_admin","lldap_password_manager","lldap_strict_readonly"]')"

    printf -- '\n--- groups ---\n'
    local group_config=''
    while read -r group_config; do
        local group_name=''
        group_name="$(printf '%s' "$group_config" | jq --raw-output '.name')"
        create_group "$group_name"
        redundant_groups="$(printf '%s' "$redundant_groups" | jq --compact-output --arg name "$group_name" '. - [$name]')"
    done < <(jq --compact-output '.' -- "${group_config_files[@]}")
    printf -- '--- groups ---\n'

    printf -- '\n--- redundant groups ---\n'
    if [[ "$redundant_groups" == '[]' ]]; then
        printf 'There are no redundant groups\n'
    else
        local group_name=''
        while read -r group_name; do
            if [[ "$DO_CLEANUP" == 'true' ]]; then
                delete_group "$group_name"
            else
                printf '[WARNING] Group "%s" is not declared in config files\n' "$group_name"
            fi
        done < <(printf '%s' "$redundant_groups" | jq --raw-output '.[]')
    fi
    printf -- '--- redundant groups ---\n'

    # Same redundancy bookkeeping for users; the admin account is never
    # considered redundant.
    local redundant_users=''
    redundant_users="$(get_users_list | jq '[ .data.users[].id ]' | jq --compact-output --arg admin_id "$LLDAP_ADMIN_USERNAME" '. - [$admin_id]')"

    # Per-run cache directory for downloaded avatars, used by
    # __common_user_mutation_query; removed after the user loop.
    TMP_AVATAR_DIR="$(mktemp -d)"

    local user_config=''
    while read -r user_config; do
        # Pull each supported field out of the user's JSON object into a
        # same-named shell variable (missing fields become the string "null").
        local field='' id='' email='' displayName='' firstName='' lastName='' avatar_file='' avatar_url='' gravatar_avatar='' weserv_avatar='' password=''
        for field in 'id' 'email' 'displayName' 'firstName' 'lastName' 'avatar_file' 'avatar_url' 'gravatar_avatar' 'weserv_avatar' 'password'; do
            declare "$field"="$(printf '%s' "$user_config" | jq --raw-output --arg field "$field" '.[$field]')"
        done
        printf -- '\n--- %s ---\n' "$id"

        create_update_user "$id" "$email" "$displayName" "$firstName" "$lastName" "$avatar_file" "$avatar_url" "$gravatar_avatar" "$weserv_avatar"
        redundant_users="$(printf '%s' "$redundant_users" | jq --compact-output --arg id "$id" '. - [$id]')"

        # Set the password only when one is actually configured.
        if [[ "$password" != 'null' ]] && [[ "$password" != '""' ]]; then
            "$LLDAP_SET_PASSWORD_PATH" --base-url "$LLDAP_URL" --token "$TOKEN" --username "$id" --password "$password"
        fi

        # Track this user's current server-side group memberships; every
        # declared membership is removed, leaving the redundant ones.
        local redundant_user_groups=''
        redundant_user_groups="$(get_user_details "$id" | jq '[ .data.user.groups[].displayName ]')"

        local group=''
        while read -r group; do
            # The feeding jq emits an empty line when .groups is null.
            if [[ -n "$group" ]]; then
                add_user_to_group "$id" "$group"
                redundant_user_groups="$(printf '%s' "$redundant_user_groups" | jq --compact-output --arg group "$group" '. - [$group]')"
            fi
        done < <(printf '%s' "$user_config" | jq --raw-output '.groups | if . == null then "" else .[] end')

        local user_group_name=''
        while read -r user_group_name; do
            if [[ "$DO_CLEANUP" == 'true' ]]; then
                remove_user_from_group "$id" "$user_group_name"
            else
                printf '[WARNING] User "%s" is not declared as member of the "%s" group in the config files\n' "$id" "$user_group_name"
            fi
        done < <(printf '%s' "$redundant_user_groups" | jq --raw-output '.[]')
        printf -- '--- %s ---\n' "$id"
    done < <(jq --compact-output '.' -- "${user_config_files[@]}")

    rm -r "$TMP_AVATAR_DIR"

    printf -- '\n--- redundant users ---\n'
    if [[ "$redundant_users" == '[]' ]]; then
        printf 'There are no redundant users\n'
    else
        local id=''
        while read -r id; do
            if [[ "$DO_CLEANUP" == 'true' ]]; then
                delete_user "$id"
            else
                printf '[WARNING] User "%s" is not declared in config files\n' "$id"
            fi
        done < <(printf '%s' "$redundant_users" | jq --raw-output '.[]')
    fi
    printf -- '--- redundant users ---\n'
}
|
||||
|
||||
# Script entry point: forward all command-line arguments to main.
main "$@"
|
||||
@@ -6,12 +6,11 @@ LDAP configuration is in ```/dokuwiki/conf/local.protected.php```:
|
||||
<?php
|
||||
$conf['useacl'] = 1; //enable ACL
|
||||
$conf['authtype'] = 'authldap'; //enable this Auth plugin
|
||||
$conf['superuser'] = 'admin';
|
||||
$conf['plugin']['authldap']['server'] = 'ldap://lldap_server:3890'; #IP of your lldap
|
||||
$conf['plugin']['authldap']['usertree'] = 'ou=people,dc=example,dc=com';
|
||||
$conf['plugin']['authldap']['grouptree'] = 'ou=groups, dc=example, dc=com';
|
||||
$conf['plugin']['authldap']['userfilter'] = '(&(uid=%{user})(objectClass=person))';
|
||||
$conf['plugin']['authldap']['groupfilter'] = '(&(member=%{dn})(objectClass=groupOfUniqueNames))';
|
||||
$conf['plugin']['authldap']['groupfilter'] = '(objectClass=group)';
|
||||
$conf['plugin']['authldap']['attributes'] = array('cn', 'displayname', 'mail', 'givenname', 'objectclass', 'sn', 'uid', 'memberof');
|
||||
$conf['plugin']['authldap']['version'] = 3;
|
||||
$conf['plugin']['authldap']['binddn'] = 'cn=admin,ou=people,dc=example,dc=com';
|
||||
|
||||
@@ -1,22 +0,0 @@
|
||||
# Basic LDAP auth for an Ergo IRC server
|
||||
|
||||
[Main documentation here.](https://github.com/ergochat/ergo-ldap)
|
||||
|
||||
For simple user auth prepare a ldap-config.yaml with the following settings
|
||||
|
||||
```
|
||||
host: "127.0.0.1"
|
||||
port: 3890
|
||||
timeout: 30s
|
||||
|
||||
# uncomment for TLS / LDAPS:
|
||||
# use-ssl: true
|
||||
|
||||
bind-dn: "uid=%s,ou=people,dc=example,dc=org"
|
||||
```
|
||||
|
||||
Then add the compiled ergo-ldap program to your Ergo folder and make sure it can be executed by the same user your Ergo IRCd runs as.
|
||||
|
||||
Follow the instructions in the main Ergo config file's accounts section on how to execute an external auth program.
|
||||
|
||||
Make sure SASL auth is enabled and then restart Ergo to enable LDAP linked SASL auth.
|
||||
@@ -1,30 +0,0 @@
|
||||
# GitLab Configuration
|
||||
|
||||
Members of the group ``git_user`` will have access to GitLab.
|
||||
|
||||
Edit ``/etc/gitlab/gitlab.rb``:
|
||||
|
||||
```ruby
|
||||
gitlab_rails['ldap_enabled'] = true
|
||||
gitlab_rails['ldap_servers'] = {
|
||||
'main' => {
|
||||
'label' => 'LDAP',
|
||||
'host' => 'ldap.example.com',
|
||||
'port' => 3890,
|
||||
'uid' => 'uid',
|
||||
'base' => 'ou=people,dc=example,dc=com',
|
||||
'encryption' => 'plain',
|
||||
'bind_dn' => 'uid=bind_user,ou=people,dc=example,dc=com',
|
||||
'password' => '<bind user password>',
|
||||
'active_directory' => false,
|
||||
'user_filter' => '(&(objectclass=person)(memberof=cn=git_user,ou=groups,dc=example,dc=com))',
|
||||
'attributes' => {
|
||||
'username' => 'uid',
|
||||
'email' => 'mail',
|
||||
'name' => 'displayName',
|
||||
'first_name' => 'givenName',
|
||||
'last_name' => 'sn'
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
@@ -37,7 +37,7 @@ search_base_dns = ["dc=example,dc=org"]
|
||||
[servers.attributes]
|
||||
member_of = "memberOf"
|
||||
email = "mail"
|
||||
name = "displayName"
|
||||
name = "givenName"
|
||||
surname = "sn"
|
||||
username = "uid"
|
||||
|
||||
|
||||
@@ -1,28 +0,0 @@
|
||||
# Configuration for Grocy
|
||||
|
||||
Adjust the following values in the file `config/data/config.php` or add environment variables for them (prefixed with `GROCY_`).
|
||||
|
||||
NOTE: If the environment variables are not working (for example in the linuxserver.io Docker Image), you need to add `clear_env = no` under the `[www]` in `/config/php/www2.conf`.
|
||||
|
||||
Replace `dc=example,dc=com` with your LLDAP configured domain.
|
||||
|
||||
### AUTH_CLASS
|
||||
Needs to be set to `Grocy\Middleware\LdapAuthMiddleware` in order to use LDAP
|
||||
|
||||
### LDAP_ADDRESS
|
||||
The address of your ldap server, eg: `ldap://lldap.example.com:389`
|
||||
|
||||
### LDAP_BASE_DN
|
||||
The base dn, usually points directly to the `people`, eg: `ou=people,dc=example,dc=com`
|
||||
|
||||
### LDAP_BIND_DN
|
||||
The reader user for lldap, eg: `uid=ldap-reader,ou=people,dc=example,dc=com`
|
||||
|
||||
### LDAP_BIND_PW
|
||||
The password for the reader user
|
||||
|
||||
### LDAP_USER_FILTER
|
||||
The filter to use for the users, eg. for a separate group: `(&(objectClass=person)(memberof=cn=grocy_users,ou=groups,dc=example,dc=com))`
|
||||
|
||||
### LDAP_UID_ATTR
|
||||
The user id attribute, should be `uid`
|
||||
@@ -6,8 +6,8 @@ Home Assistant configures ldap auth via the [Command Line Auth Provider](https:/
|
||||
|
||||
The [auth script](lldap-ha-auth.sh) attempts to authenticate a user against an LLDAP server, using credentials provided via `username` and `password` environment variables. The first argument must be the URL of your LLDAP server, accessible from Home Assistant. You can provide an additional optional argument to confine allowed logins to a single group. The script will output the user's display name as the `name` variable, if not empty.
|
||||
|
||||
1. Copy the [auth script](lldap-ha-auth.sh) to your home assistant instance. In this example, we use `/config/lldap-ha-auth.sh`.
|
||||
- Set the script as executable by running `chmod +x /config/lldap-ha-auth.sh`
|
||||
1. Copy the [auth script](lldap-ha-auth.sh) to your home assistant instance. In this example, we use `/config/lldap-auth.sh`.
|
||||
   - Set the script as executable by running `chmod +x /config/lldap-auth.sh`
|
||||
2. Add the following to your configuration.yaml in Home assistant:
|
||||
```yaml
|
||||
homeassistant:
|
||||
@@ -15,21 +15,10 @@ homeassistant:
|
||||
# Ensure you have the homeassistant provider enabled if you want to continue using your existing accounts
|
||||
- type: homeassistant
|
||||
- type: command_line
|
||||
command: /config/lldap-ha-auth.sh
|
||||
# arguments: [<LDAP Host>, <regular user group>, <admin user group>, <local user group>]
|
||||
# <regular user group>: Find users that has permission to access homeassistant, anyone inside
|
||||
# this group will have the default 'system-users' permission in homeassistant.
|
||||
#
|
||||
# <admin user group>: Allow users in the <regular user group> to be assigned into 'system-admin' group.
|
||||
# Anyone inside this group will not have the 'system-users' permission as only one permission group
|
||||
# is allowed in homeassistant
|
||||
#
|
||||
# <local user group>: Users in the <local user group> (e.g., 'homeassistant_local') can only access
|
||||
# homeassistant inside LAN network.
|
||||
#
|
||||
# Only the first argument is required. ["https://lldap.example.com"] allows all users to log in from
|
||||
# anywhere and have 'system-users' permissions.
|
||||
args: ["https://lldap.example.com", "homeassistant_user", "homeassistant_admin", "homeassistant_local"]
|
||||
command: /config/lldap-auth.sh
|
||||
# Only allow users in the 'homeassistant_user' group to login.
|
||||
# Change to ["https://lldap.example.com"] to allow all users
|
||||
args: ["https://lldap.example.com", "homeassistant_user"]
|
||||
meta: true
|
||||
```
|
||||
3. Reload your config or restart Home Assistant
|
||||
|
||||
@@ -37,9 +37,9 @@ Otherwise, just use:
|
||||
```
|
||||
### Admin Base DN
|
||||
|
||||
The DN to search for your admins.
|
||||
The DN of your admin group. If you have `media_admin` as your group you would use:
|
||||
```
|
||||
ou=people,dc=example,dc=com
|
||||
cn=media_admin,ou=groups,dc=example,dc=com
|
||||
```
|
||||
|
||||
### Admin Filter
|
||||
@@ -49,15 +49,8 @@ that), use:
|
||||
```
|
||||
(memberof=cn=media_admin,ou=groups,dc=example,dc=com)
|
||||
```
|
||||
Bear in mind that admins must also be a member of the users group if you use one.
|
||||
|
||||
Otherwise, you can use LLDAP's admin group:
|
||||
```
|
||||
(memberof=cn=lldap_admin,ou=groups,dc=example,dc=com)
|
||||
```
|
||||
|
||||
## Password change
|
||||
To allow changing Passwords via Jellyfin the following things are required
|
||||
- The bind user needs to have the group lldap_password_manager (changing passwords of members of the group lldap_admin does not work to prevent privilege escalation)
|
||||
- Check `Allow Password Change`
|
||||
- `LDAP Password Attribute` Needs to be set to `userPassword`
|
||||
|
||||
@@ -1,81 +0,0 @@
|
||||
# Configuration for Jenkins
|
||||
|
||||
## Jenkins base setup
|
||||
|
||||
To setup LLDAP for Jenkins navigate to Dashboard/Manage Jenkins/Security.
|
||||
|
||||
*Note: Jenkins LDAP plugin has to be installed!</br>*
|
||||
*Note: "dc=example,dc=com" is default configuration, you should replace it with your base DN.*
|
||||
|
||||
1) Set **Security Realm** to **LDAP**
|
||||
2) Click Add Server
|
||||
3) Setup config fields as stated below
|
||||
|
||||
## Config fields
|
||||
|
||||
#### Server
|
||||
*(This can be replaced by server ip/your domain etc.)*
|
||||
```
|
||||
ldap://example.com:3890
|
||||
```
|
||||
### Advanced Server Configuration Dropdown
|
||||
|
||||
#### root DN
|
||||
```
|
||||
dc=example,dc=com
|
||||
```
|
||||
|
||||
#### Allow blank rootDN
|
||||
```
|
||||
true
|
||||
```
|
||||
|
||||
#### User search base
|
||||
```
|
||||
ou=people
|
||||
```
|
||||
|
||||
#### User search filter
|
||||
```
|
||||
uid={0}
|
||||
```
|
||||
|
||||
#### Group search base
|
||||
```
|
||||
ou=groups
|
||||
```
|
||||
|
||||
#### Group search filter
|
||||
```
|
||||
(& (cn={0})(objectclass=groupOfNames))
|
||||
```
|
||||
|
||||
#### Group membership
|
||||
Select Search for LDAP groups containing user and leave Group membership filter empty
|
||||
|
||||
#### Manager DN
|
||||
Leave here your admin account
|
||||
```
|
||||
cn=admin,ou=people,dc=example,dc=com
|
||||
```
|
||||
#### Manager Password
|
||||
Leave it as is
|
||||
|
||||
#### Display Name LDAP attribute
|
||||
Leave cn as it inputs username
|
||||
```
|
||||
cn
|
||||
```
|
||||
|
||||
#### Email Address LDAP attribute
|
||||
```
|
||||
mail
|
||||
```
|
||||
|
||||
### Tips & Tricks
|
||||
- Always use Test LDAP settings so you won't get locked out. It works without password.
|
||||
- If you want to setup your permissions, go to Authorization setting and select Matrix-based security. Add group/user (it has to exist in LLDAP) and you can grant him permissions. Note that Overall Read forbids users to read jenkins and execute actions. Administer gives full rights.
|
||||
|
||||
### Useful links:
|
||||
https://plugins.jenkins.io/ldap/</br>
|
||||
https://www.jenkins.io/doc/book/security/managing-security/
|
||||
@@ -1,19 +0,0 @@
|
||||
# Configuration for Kasm
|
||||
|
||||
In Kasm, go to *Admin* -> *Authentication* -> *LDAP* and add a configuration.
|
||||
- *Name*: whatever you like
|
||||
- *Url* is your lldap host (or IP) and port, e.g. `ldap://lldap.example.com:3890`
|
||||
- *Search Base* is your base DN, e.g. `dc=example,dc=com`
|
||||
- *Search Filter* is `(&(objectClass=person)(uid={0})(memberof=cn=kasm,ou=groups,dc=example,dc=com))`. Replace `cn=kasm,ou=groups,dc=example,dc=com` with the dn to the group necessary to login to Kasm.
|
||||
- *Group Membership Filter* `(&(objectClass=groupOfUniqueNames)(member={0}))`
|
||||
- *Email attribute* `mail`
|
||||
- *Service Account DN* an lldap user, preferably not an admin but a member of the group `lldap_strict_readonly`. Mine is called `cn=query,ou=people,dc=example,dc=com`
|
||||
- *Service Account Password*: querys password
|
||||
- Activate *Search Subtree*, *Auto Create App User* and *Enabled*
|
||||
- under *Attribute Mapping* you can map the following:
|
||||
- *Email* -> `mail`
|
||||
- *First Name* -> `givenname`
|
||||
- *Last Name* -> `sn`
|
||||
- If you want to map groups from your lldap to Kasm, edit the group, scroll to *SSO Group Mappings* and add a new SSO mapping:
|
||||
- select your lldap as provider
|
||||
- *Group Attributes* is the full DN of your group, e.g. `cn=kasm_moreaccess,ou=groups,dc=example,dc=com`
|
||||
@@ -62,11 +62,3 @@ Once the groups are synchronized, go to "Manage > Groups" on the left. Click on
|
||||
|
||||
Assign the role "admin" to the group. Now you can log in as the LLDAP admin to
|
||||
the KeyCloak admin console.
|
||||
|
||||
## Fixing duplicate names or missing First Names for users
|
||||
|
||||
Since Keycloak and LLDAP use different attributes for different parts of a user's name, you may see duplicated or missing names for users in Keycloak. To fix this, update the attribute mappings:
|
||||
|
||||
Go back to "User Federation", edit your LDAP integration and click on the "Mappers" tab.
|
||||
|
||||
Find or create the "first name" mapper (it should have type `user-attribute-ldap-mapper`) and ensure the "LDAP Attribute" setting is set to `givenname`. Keycloak may have defaulted to `cn` which LLDAP uses for the "Display Name" of a user.
|
||||
|
||||
@@ -1,193 +0,0 @@
|
||||
# Configuration for LibreNMS
|
||||
|
||||
You can either configure LibreNMS from the webui or from the command line. This is a list of the variables that you should set.
|
||||
|
||||
## Essential
|
||||
|
||||
## auth_ldap_uid_attribute
|
||||
|
||||
```
|
||||
uid
|
||||
```
|
||||
|
||||
This sets 'uid' as the unique ldap attribute for users.
|
||||
|
||||
## auth_ldap_groupmemberattr
|
||||
|
||||
```
|
||||
member
|
||||
```
|
||||
|
||||
## auth_ldap_groups
|
||||
|
||||
```
{"nms_admin": {"level": 10}}
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```
|
||||
auth_ldap_groups.nms_admin.level: 10
|
||||
```
|
||||
|
||||
These are both the same.
|
||||
|
||||
This example sets the group nms_admin as Admin (level 10).
|
||||
Set others to match more groups at different levels.
|
||||
|
||||
## auth_ldap_starttls
|
||||
|
||||
```
|
||||
false
|
||||
```
|
||||
|
||||
## auth_ldap_server
|
||||
|
||||
```
|
||||
[lldap server ip]
|
||||
```
|
||||
|
||||
## auth_ldap_port
|
||||
|
||||
```
|
||||
3890
|
||||
```
|
||||
|
||||
## auth_ldap_suffix
|
||||
|
||||
```
|
||||
,ou=people,dc=example,dc=com
|
||||
```
|
||||
|
||||
Not sure if the case of people actually matters.
|
||||
Make sure you keep the initial comma.
|
||||
|
||||
## auth_ldap_groupbase
|
||||
|
||||
```
|
||||
ou=groups,dc=example,dc=com
|
||||
```
|
||||
|
||||
## auth_mechanism
|
||||
|
||||
```
|
||||
ldap
|
||||
```
|
||||
Be careful with this as you will lock yourself out if ldap does not work correctly. Set back to 'mysql' to turn ldap off.
|
||||
|
||||
### auth_ldap_require_groupmembership
|
||||
|
||||
```
|
||||
false
|
||||
```
|
||||
|
||||
## Testing
|
||||
|
||||
Use the test script to make sure it works.
|
||||
```
|
||||
./script/auth_test.php -u <user>
|
||||
```
|
||||
Make sure the level is correctly populated. Should look like this:
|
||||
|
||||
```
|
||||
librenms:/opt/librenms# ./scripts/auth_test.php -uadmin
|
||||
Authentication Method: ldap
|
||||
Password:
|
||||
Authenticate user admin:
|
||||
AUTH SUCCESS
|
||||
|
||||
User (admin):
|
||||
username => admin
|
||||
realname => Administrator
|
||||
user_id => admin
|
||||
email => admin@example.com
|
||||
level => 10
|
||||
Groups: cn=nms_admin,ou=groups,dc=example,dc=com
|
||||
```
|
||||
|
||||
## Setting variables
|
||||
|
||||
### Web UI
|
||||
|
||||
You can set all the variables in the web UI in: Settings -> Authentication -> LDAP Settings
|
||||
|
||||
### Command line
|
||||
|
||||
You can use the lnms command to *get* config options like this:
|
||||
```
|
||||
lnms config:get auth_ldap_uid_attribute
|
||||
```
|
||||
|
||||
You can use the lnms command to *set* config options like this:
|
||||
```
|
||||
lnms config:set auth_ldap_uid_attribute uid
|
||||
```
|
||||
|
||||
Read more [here](https://docs.librenms.org/Support/Configuration/)
|
||||
|
||||
### Pre load configuration for Docker
|
||||
|
||||
You can create a file named: /data/config/ldap.yaml and place your variables in there.
|
||||
|
||||
```
|
||||
librenms:/opt/librenms# cat /data/config/auth.yaml
|
||||
auth_mechanism: ldap
|
||||
|
||||
auth_ldap_server: 172.17.0.1
|
||||
auth_ldap_port: 3890
|
||||
auth_ldap_version: 3
|
||||
auth_ldap_suffix: ,ou=people,dc=example,dc=com
|
||||
auth_ldap_groupbase: ou=groups,dc=example,dc=com
|
||||
|
||||
auth_ldap_prefix: uid=
|
||||
auth_ldap_starttls: False
|
||||
auth_ldap_attr: {"uid": "uid"}
|
||||
auth_ldap_uid_attribute: uid
|
||||
auth_ldap_groups: {"nms_admin": {"level": 10}}
|
||||
auth_ldap_groupmemberattr: member
|
||||
auth_ldap_require_groupmembership: False
|
||||
auth_ldap_debug: False
|
||||
|
||||
auth_ldap_group: cn=groupname,ou=groups,dc=example,dc=com
|
||||
auth_ldap_groupmembertype: username
|
||||
auth_ldap_timeout: 5
|
||||
auth_ldap_emailattr: mail
|
||||
auth_ldap_userdn: True
|
||||
auth_ldap_userlist_filter:
|
||||
auth_ldap_wildcard_ou: False
|
||||
```
|
||||
|
||||
Read more [here](https://github.com/librenms/docker#configuration-management)
|
||||
|
||||
## Issue with current LibreNMS
|
||||
|
||||
The current version (23.7.0 at the time of writing) does not support lldap. A fix has been accepted to LibreNMS so the next version should just work.
|
||||
|
||||
[Link to the commit](https://github.com/librenms/librenms/commit/a71ca98fac1a75753b102be8b3644c4c3ee1a624)
|
||||
|
||||
If you want to apply the fix manually, run git apply with this patch.
|
||||
|
||||
```
|
||||
diff --git a/LibreNMS/Authentication/LdapAuthorizer.php b/LibreNMS/Authentication/LdapAuthorizer.php
|
||||
index 5459759ab..037a7382b 100644
|
||||
--- a/LibreNMS/Authentication/LdapAuthorizer.php
|
||||
+++ b/LibreNMS/Authentication/LdapAuthorizer.php
|
||||
@@ -233,7 +233,7 @@ class LdapAuthorizer extends AuthorizerBase
|
||||
$entries = ldap_get_entries($connection, $search);
|
||||
foreach ($entries as $entry) {
|
||||
$user = $this->ldapToUser($entry);
|
||||
- if ((int) $user['user_id'] !== (int) $user_id) {
|
||||
+ if ($user['user_id'] != $user_id) {
|
||||
continue;
|
||||
}
|
||||
|
||||
@@ -360,7 +360,7 @@ class LdapAuthorizer extends AuthorizerBase
|
||||
return [
|
||||
'username' => $entry['uid'][0],
|
||||
'realname' => $entry['cn'][0],
|
||||
- 'user_id' => (int) $entry[$uid_attr][0],
|
||||
+ 'user_id' => $entry[$uid_attr][0],
|
||||
'email' => $entry[Config::get('auth_ldap_emailattr', 'mail')][0],
|
||||
'level' => $this->getUserlevel($entry['uid'][0]),
|
||||
];
|
||||
```
|
||||
@@ -66,26 +66,5 @@ fi
|
||||
|
||||
DISPLAY_NAME=$(jq -r .displayName <<< $USER_JSON)
|
||||
|
||||
IS_ADMIN=false
|
||||
if [[ ! -z "$3" ]] && jq -e '.groups|map(.displayName)|index("'"$3"'")' <<< "$USER_JSON" > /dev/null 2>&1; then
|
||||
IS_ADMIN=true
|
||||
fi
|
||||
|
||||
IS_LOCAL=false
|
||||
if [[ ! -z "$4" ]] && jq -e '.groups|map(.displayName)|index("'"$4"'")' <<< "$USER_JSON" > /dev/null 2>&1; then
|
||||
IS_LOCAL=true
|
||||
fi
|
||||
|
||||
[[ ! -z "$DISPLAY_NAME" ]] && echo "name = $DISPLAY_NAME"
|
||||
|
||||
if [[ "$IS_ADMIN" = true ]]; then
|
||||
echo "group = system-admin"
|
||||
else
|
||||
echo "group = system-users"
|
||||
fi
|
||||
|
||||
if [[ "$IS_LOCAL" = true ]]; then
|
||||
echo "local_only = true"
|
||||
else
|
||||
echo "local_only = false"
|
||||
fi
|
||||
@@ -1,6 +1,6 @@
|
||||
[Unit]
|
||||
Description=Nitnelave LLDAP
|
||||
Documentation=https://github.com/lldap/lldap
|
||||
Documentation=https://github.com/nitnelave/lldap
|
||||
|
||||
# Only sqlite
|
||||
After=network.target
|
||||
|
||||
@@ -1,96 +0,0 @@
|
||||
# Mailserver Docker
|
||||
|
||||
[Docker-mailserver](https://docker-mailserver.github.io/docker-mailserver/latest/) is a Production-ready full-stack but simple mail server (SMTP, IMAP, LDAP, Antispam, Antivirus, etc.) running inside a container.
|
||||
|
||||
To integrate with LLDAP, ensure you correctly adjust the `docker-mailserver` container environment values.
|
||||
|
||||
## Compose File Sample
|
||||
```yaml
|
||||
version: "3.9"
|
||||
services:
|
||||
lldap:
|
||||
image: lldap/lldap:stable
|
||||
ports:
|
||||
- "3890:3890"
|
||||
- "17170:17170"
|
||||
volumes:
|
||||
- "lldap_data:/data"
|
||||
environment:
|
||||
- VERBOSE=true
|
||||
- TZ=Etc/UTC
|
||||
- LLDAP_JWT_SECRET=yourjwt
|
||||
- LLDAP_LDAP_USER_PASS=adminpassword
|
||||
- LLDAP_LDAP_BASE_DN=dc=example,dc=com
|
||||
|
||||
mailserver:
|
||||
image: ghcr.io/docker-mailserver/docker-mailserver:latest
|
||||
container_name: mailserver
|
||||
hostname: mail.example.com
|
||||
ports:
|
||||
- "25:25" # SMTP (explicit TLS => STARTTLS)
|
||||
- "143:143" # IMAP4 (explicit TLS => STARTTLS)
|
||||
- "465:465" # ESMTP (implicit TLS)
|
||||
- "587:587" # ESMTP (explicit TLS => STARTTLS)
|
||||
- "993:993" # IMAP4 (implicit TLS)
|
||||
volumes:
|
||||
- mailserver-data:/var/mail
|
||||
- mailserver-state:/var/mail-state
|
||||
- mailserver-config:/tmp/docker-mailserver/
|
||||
- /etc/localtime:/etc/localtime:ro
|
||||
restart: always
|
||||
stop_grace_period: 1m
|
||||
healthcheck:
|
||||
test: "ss --listening --tcp | grep -P 'LISTEN.+:smtp' || exit 1"
|
||||
timeout: 3s
|
||||
retries: 0
|
||||
environment:
|
||||
- LOG_LEVEL=debug
|
||||
- SUPERVISOR_LOGLEVEL=debug
|
||||
- SPAMASSASSIN_SPAM_TO_INBOX=1
|
||||
- ENABLE_FAIL2BAN=0
|
||||
- ENABLE_AMAVIS=0
|
||||
- SPOOF_PROTECTION=1
|
||||
- ENABLE_OPENDKIM=0
|
||||
- ENABLE_OPENDMARC=0
|
||||
# >>> Postfix LDAP Integration
|
||||
- ACCOUNT_PROVISIONER=LDAP
|
||||
- LDAP_SERVER_HOST=lldap:3890
|
||||
- LDAP_SEARCH_BASE=dc=example,dc=com
|
||||
- LDAP_BIND_DN=uid=admin,ou=people,dc=example,dc=com
|
||||
- LDAP_BIND_PW=adminpassword
|
||||
- LDAP_QUERY_FILTER_USER=(&(objectClass=inetOrgPerson)(|(uid=%u)(mail=%u)))
|
||||
- LDAP_QUERY_FILTER_GROUP=(&(objectClass=groupOfUniqueNames)(uid=%s))
|
||||
- LDAP_QUERY_FILTER_ALIAS=(&(objectClass=inetOrgPerson)(|(uid=%u)(mail=%u)))
|
||||
- LDAP_QUERY_FILTER_DOMAIN=((mail=*@%s))
|
||||
# <<< Postfix LDAP Integration
|
||||
# >>> Dovecot LDAP Integration
|
||||
- DOVECOT_AUTH_BIND=yes
|
||||
- DOVECOT_USER_FILTER=(&(objectClass=inetOrgPerson)(|(uid=%u)(mail=%u)))
|
||||
- DOVECOT_USER_ATTRS==uid=5000,=gid=5000,=home=/var/mail/%Ln,=mail=maildir:~/Maildir
|
||||
- POSTMASTER_ADDRESS=postmaster@d3n.com
|
||||
cap_add:
|
||||
- SYS_PTRACE
|
||||
- NET_ADMIN # For Fail2Ban to work
|
||||
|
||||
roundcubemail:
|
||||
image: roundcube/roundcubemail:latest
|
||||
container_name: roundcubemail
|
||||
restart: always
|
||||
volumes:
|
||||
- roundcube_data:/var/www/html
|
||||
ports:
|
||||
- "9002:80"
|
||||
environment:
|
||||
- ROUNDCUBEMAIL_DB_TYPE=sqlite
|
||||
- ROUNDCUBEMAIL_SKIN=elastic
|
||||
- ROUNDCUBEMAIL_DEFAULT_HOST=mailserver # IMAP
|
||||
- ROUNDCUBEMAIL_SMTP_SERVER=mailserver # SMTP
|
||||
|
||||
volumes:
|
||||
mailserver-data:
|
||||
mailserver-config:
|
||||
mailserver-state:
|
||||
lldap_data:
|
||||
roundcube_data:
|
||||
|
||||
```
|
||||
@@ -1,15 +0,0 @@
|
||||
## ADD after values in the existing .env file.
|
||||
## This example uses the unsecured 3890 port. For ldaps, set LDAP_METHOD=simple_tls and LDAP_PORT=6360
|
||||
## For more details, see https://github.com/joylarkin/mastodon-documentation/blob/master/Running-Mastodon/Enabling-LDAP-login.md
|
||||
LDAP_ENABLED=true
|
||||
LDAP_METHOD=plain
|
||||
LDAP_HOST=lldap
|
||||
LDAP_PORT=3890
|
||||
LDAP_BASE=dc=domain,dc=com
|
||||
LDAP_BIND_DN=uid=admin,ou=people,dc=domain,dc=com
|
||||
LDAP_PASSWORD=<lldap_admin_password_here>
|
||||
LDAP_UID=uid
|
||||
LDAP_MAIL=mail
|
||||
LDAP_UID_CONVERSION_ENABLED=true
|
||||
# match username or mail to authenticate, and only allow users belonging to group 'mastodon'
|
||||
LDAP_SEARCH_FILTER=(&(memberof=cn=mastodon,ou=groups,dc=domain,dc=com)(|(%{uid}=%{email})(%{mail}=%{email})))
|
||||
@@ -1,28 +0,0 @@
|
||||
# Mealie
|
||||
|
||||
Configuration is done solely with environmental variables in the mealie-api docker-compose config:
|
||||
|
||||
## Note
|
||||
[LDAP integration in Mealie currently only works with the nightly branch](https://github.com/hay-kot/mealie/issues/2402#issuecomment-1560176528), so `hkotel/mealie:api-nightly` and `hkotel/mealie:frontend-nightly` rather than the current "stable" release of `v1.0.0beta-5`
|
||||
|
||||
## Configuration
|
||||
|
||||
The following config should let you login with either members of the `mealie` group as a user, or as an admin user with members of the `mealie-admin` group.
|
||||
|
||||
Mealie first checks credentials in the `mealie` group to authenticate, then checks for the presence of the user in the `mealie-admin` group and elevates that account to admin status if present, therefore for any account to be an admin account it must belong in both the `mealie` group and the `mealie-admin` group.
|
||||
|
||||
It is recommended to create a `readonly_user` and add them to the `lldap_strict_readonly` group to bind with.
|
||||
|
||||
```yaml
|
||||
- LDAP_AUTH_ENABLED=true
|
||||
- LDAP_SERVER_URL=ldap://lldap:3890
|
||||
- LDAP_TLS_INSECURE=true ## Only required for LDAPS with a self-signed certificate
|
||||
- LDAP_BASE_DN=ou=people,dc=example,dc=com
|
||||
- LDAP_USER_FILTER=(memberof=cn=mealie,ou=groups,dc=example,dc=com)
|
||||
- LDAP_ADMIN_FILTER=(memberof=cn=mealie-admin,ou=groups,dc=example,dc=com)
|
||||
- LDAP_QUERY_BIND=cn=readonly_user,ou=people,dc=example,dc=com
|
||||
- LDAP_QUERY_PASSWORD=READONLY_USER_PASSWORD
|
||||
- LDAP_ID_ATTRIBUTE=uid
|
||||
- LDAP_NAME_ATTRIBUTE=displayName
|
||||
- LDAP_MAIL_ATTRIBUTE=mail
|
||||
```
|
||||
@@ -1,37 +0,0 @@
|
||||
# MinIO Configuration
|
||||
|
||||
MinIO is a High-Performance Object Storage released under GNU Affero General Public License v3. 0. It is API compatible with the Amazon S3 cloud storage service. This example assists with basic LDAP configuration and policy attachment.
|
||||
|
||||
## LDAP Config
|
||||
|
||||
### Navigation
|
||||
|
||||
- Login to the WebUI as a consoleAdmin user
|
||||
- Navigate to `Administrator > Identity > LDAP`
|
||||
- Click `Edit Configuration`
|
||||
|
||||
### Configuration Options
|
||||
|
||||
- Server Insecure: Enabled
|
||||
- Server Address: Hostname or IP for your LLDAP host
|
||||
- Lookup Bind DN: `uid=admin,ou=people,dc=example,dc=com`
|
||||
- It is recommended that you create a separate user account (e.g., `bind_user`) instead of `admin` for sharing Bind credentials with other services. The `bind_user` should be a member of the `lldap_strict_readonly` group to limit access to your LDAP configuration in LLDAP.
|
||||
- Lookup Bind Password: The password for the user referenced above
|
||||
- User DN Search Base: `ou=people,dc=example,dc=com`
|
||||
- User DN Search Filter: `(&(uid=%s)(memberOf=cn=minio_admin,ou=groups,dc=example,dc=com))`
|
||||
- This search filter will only allow users that are members of the `minio_admin` group to authenticate. To allow all lldap users, this filter can be used instead `(uid=%s)`
|
||||
- Group Search Base DN: `ou=groups,dc=example,dc=com`
|
||||
- Group Search Filter: `(member=%d)`
|
||||
|
||||
### Enable LDAP
|
||||
|
||||
> Note there appears to be a bug in some versions of MinIO where LDAP is enabled and working, however the configuration UI reports that it is not enabled.
|
||||
|
||||
Now, you can enable LDAP authentication by clicking the `Enable LDAP` button, a restart of the service or container is needed. With this configuration, LLDAP users will be able to log in to MinIO now. However they will not be able to do anything, as we need to attach policies giving permissions to users.
|
||||
|
||||
## Policy Attachment
|
||||
|
||||
Creating MinIO policies is outside of the scope for this document, but it is well documented by MinIO [here](https://min.io/docs/minio/linux/administration/identity-access-management/policy-based-access-control.html). Policies are written in JSON, are extremely flexible, and can be configured to be very granular. In this example we will be using one of the built-in Policies, `consoleAdmin`. We will be applying these policies with the `mc` command line utility.
|
||||
|
||||
- Alias your MinIO instance: `mc alias set myMinIO http://<your-minio-address>:<your-minio-api-port> admin <your-admin-password>`
|
||||
- Attach a policy to your LDAP group: `mc admin policy attach myMinIO consoleAdmin --group='cn=minio_admin,ou=groups,dc=example,dc=com'`
|
||||
@@ -2,7 +2,7 @@
|
||||
|
||||
If you're here, there are some assumptions being made about access and capabilities you have on your system:
|
||||
1. You have Authelia up and running, understand its functionality, and have read through the documentation.
|
||||
2. You have [LLDAP](https://github.com/lldap/lldap) up and running.
|
||||
2. You have [LLDAP](https://github.com/nitnelave/lldap) up and running.
|
||||
3. You have Nextcloud and LLDAP communicating and without any config errors. See the [example config for Nextcloud](nextcloud.md)
|
||||
|
||||
## Authelia
|
||||
@@ -70,7 +70,7 @@ _The first two can be any string you'd like to identify the connection with. The
|
||||
|
||||
* *_Do not_* use commas in the Nextcloud Social Login app scope! This caused many issues for me.
|
||||
* Be sure you update your Authelia `configuration.yml`. Specifically, the line: `redirect_uris`. The new URL should be
|
||||
`https://nextcloud.example.com/apps/sociallogin/custom_oidc/Authelia`, in some cases the URL also contains the index.php file and has to look like this `https://nextcloud.example.com/index.php/apps/sociallogin/custom_oidc/Authelia`. Check whether your Nextcloud URL contains index.php: if it does, the URL without index.php won't work, and if it doesn't, the URL with index.php won't work.
|
||||
`https://auth.example.com/index.php/apps/sociallogin/custom_oidc/Authelia`.
|
||||
* The final field in the URL (Authelia) needs to be the same value you used in the Social Login "Internal Name" field.
|
||||
* If you've setup LLDAP correctly in nextcloud, the last dropdown for _Default Group_ should show you the `nextcloud_users` group you setup in LLDAP.
|
||||
|
||||
@@ -87,4 +87,4 @@ If this is set to *true* then the user flow will _skip_ the login page and autom
|
||||
### Conclusion
|
||||
And that's it! Assuming all the settings that worked for me, work for you, you should be able to login using OpenID Connect via Authelia. If you find any errors, it's a good idea to keep a document of all your settings from Authelia/Nextcloud/LLDAP etc so that you can easily reference and ensure everything lines up.
|
||||
|
||||
If you have any issues, please create a [discussion](https://github.com/lldap/lldap/discussions) or join the [Discord](https://discord.gg/h5PEdRMNyP).
|
||||
If you have any issues, please create a [discussion](https://github.com/nitnelave/lldap/discussions) or join the [Discord](https://discord.gg/h5PEdRMNyP).
|
||||
|
||||
@@ -1,39 +0,0 @@
|
||||
# Configuration for PowerDNS Admin
|
||||
|
||||
## Navigate
|
||||
|
||||
- Login to PowerDNS Admin
|
||||
- Navigate to: `Administration > Settings > Authentication`
|
||||
- Select the `LDAP` tab of the `Authentication Settings`
|
||||
|
||||
## LDAP Config
|
||||
|
||||
- Enable LDAP Authentication: Checked
|
||||
- Type: OpenLDAP
|
||||
|
||||
### Administrator Info
|
||||
|
||||
- LDAP URI: `ldap://<your-lldap-ip-or-hostname>:3890`
|
||||
- LDAP Base DN: `ou=people,dc=example,dc=com`
|
||||
- LDAP admin username: `uid=admin,ou=people,dc=example,dc=com`
|
||||
- It is recommended that you create a separate user account (e.g., `bind_user`) instead of `admin` for sharing Bind credentials with other services. The `bind_user` should be a member of the `lldap_strict_readonly` group to limit access to your LDAP configuration in LLDAP.
|
||||
- LDAP admin password: password of the user specified above
|
||||
|
||||
### Filters
|
||||
|
||||
- Basic filter: `(objectClass=person)`
|
||||
- Username field: `uid`
|
||||
- Group filter: `(objectClass=groupOfUniqueNames)`
|
||||
- Group name field: `member`
|
||||
|
||||
### Group Security (Optional)
|
||||
|
||||
> If Group Security is disabled, all users authenticated via LDAP will be given the "User" role.
|
||||
|
||||
Group Security is an optional configuration for LLDAP users. It provides a simple 1:1 mapping between LDAP groups, and PowerDNS roles.
|
||||
|
||||
- Status: On
|
||||
- Admin group: `cn=dns_admin,ou=groups,dc=example,dc=com`
|
||||
- Operator group: `cn=dns_operator,ou=groups,dc=example,dc=com`
|
||||
- User group: `cn=dns_user,ou=groups,dc=example,dc=com`
|
||||
|
||||
@@ -1,83 +0,0 @@
|
||||
# Proxmox VE Example
|
||||
|
||||
Proxmox Virtual Environment is a hyper-converged infrastructure open-source software. It is a hosted hypervisor that can run operating systems including Linux and Windows on x64 hardware. In this example we will set up user and group synchronization, with two example groups `proxmox_user` and `proxmox_admin`. This example was made using Proxmox VE 8.0.3.
|
||||
|
||||
## Navigation
|
||||
|
||||
- From the `Server View` open the `Datacenter` page
|
||||
- Then in this page, open the `Permissions > Realms` menu
|
||||
- In this menu, select `Add > LDAP Server`
|
||||
|
||||
## General Options
|
||||
|
||||
- Realm: The internal proxmox name for this authentication method
|
||||
- Base Domain Name: `dc=example,dc=com`
|
||||
- User Attribute Name: `uid`
|
||||
- Server: Your LLDAP hostname or IP
|
||||
- Port: `3890`
|
||||
- SSL: Leave unchecked unless you're using LDAPS
|
||||
- Comment: This field will be exposed as the "name" in the login page
|
||||
|
||||
## Sync Options
|
||||
|
||||
- Bind User: `uid=admin,ou=people,dc=example,dc=com`
|
||||
- It is recommended that you create a separate user account (e.g., `bind_user`) instead of `admin` for sharing Bind credentials with other services. The `bind_user` should be a member of the `lldap_strict_readonly` group to limit access to your LDAP configuration in LLDAP.
|
||||
- Bind Password: password of the user specified above
|
||||
- E-Mail Attribute: `mail`
|
||||
- Groupname attr: `cn`
|
||||
- User Filter: `(&(objectClass=person)(|(memberof=cn=proxmox_user,ou=groups,dc=example,dc=com)(memberof=cn=proxmox_admin,ou=groups,dc=example,dc=com)))`
|
||||
- This filter will only copy users that are members of the `proxmox_user` or `proxmox_admin` groups. If you want to enable all users in lldap, this filter can be used: `(objectClass=person)`
|
||||
- Group Filter: `(&(objectClass=groupofuniquenames)(|(cn=proxmox_user)(cn=proxmox_admin)))`
|
||||
- This filter will only copy the `proxmox_user` or `proxmox_admin` groups explicitly. If you want to sync all groups, this filter can be used: `(objectClass=groupofnames)`
|
||||
- Default Sync Options:
|
||||
- Scope: `Users and Groups`
|
||||
- Remove Vanished Options
|
||||
- Entry: Checked
|
||||
- Properties: Checked
|
||||
|
||||
## Synchronizing
|
||||
|
||||
Proxmox handles LDAP authentication by synchronizing your lldap server's entries to a local database. This sync can be triggered manually, and on a scheduled basis. Proxmox also offers a preview feature, which will report any changes to the local DB from a sync, without applying the changes. It is highly recommended to run a preview on your first synchronization after making any filter changes, to ensure synchronization is happening as expected.
|
||||
|
||||
### First Sync
|
||||
|
||||
- With the options saved, and from the `Permissions > Realms` page, select the LDAP realm you just created and click `Sync`
|
||||
- At the sync dialog, click the Preview button, and carefully check the output to ensure all the users and groups you expect are seen, and that nothing is being removed unexpectedly.
|
||||
- Once the preview output is matching what we expect, we can click the Sync button, on the `Realm Sync` dialog for the ldap realm we created.
|
||||
|
||||
### Scheduled Sync (Optional)
|
||||
|
||||
- Once we are confident that LDAP synchronization is working as expected, this can be scheduled as a job from the `Permissions > Realms` page.
|
||||
- On the second half of the page, click `Add` under `Realm Sync Jobs`
|
||||
- Set a schedule for this job and click `Create`
|
||||
|
||||
## ACLs
|
||||
|
||||
Once you have users and groups synchronized from lldap, it is necessary to grant some permissions to these users or groups so that they are able to use Proxmox. Proxmox handles this with a filesystem-like tree structure, and "roles" which are collections of permissions. In our basic example, we will grant the built-in `Administrator` role to our `proxmox_admin` group on the entire system. Then we will also grant the `proxmox_user` group several roles with different paths so they can clone and create VMs within a specific resource pool (`UserVMs`), but are otherwise restricted from editing or deleting other resources.
|
||||
|
||||
> Note that Proxmox appends the realm name to groups when syncing, so if you named your realm `lldap` the groups as synced will be `proxmox_user-lldap` and `proxmox_admin-lldap`
|
||||
|
||||
### Administrator
|
||||
|
||||
- From the Datacenter pane, select the `Permissions` menu page.
|
||||
- Click `Add > Group Permission`
|
||||
- Path: Type or select `/`
|
||||
- Group: Type or select the admin group that has synchronized (`proxmox_admin-lldap` in our example)
|
||||
- Role: `Administrator`
|
||||
- Finish by clicking the `Add` button and this access should now be granted
|
||||
|
||||
### User Role
|
||||
|
||||
> This example assumes we have created Resource Pools named `UserVMs` and `Templates`
|
||||
|
||||
- From the Datacenter pane, select the `Permissions` menu page.
|
||||
- We will be adding six rules in total, for each one clicking `Add > Group Permission`
|
||||
- Path: `/pool/UserVMs`, Group: `proxmox_user-lldap`, Role: PVEVMAdmin
|
||||
- Path: `/pool/UserVMs`, Group: `proxmox_user-lldap`, Role: PVEPoolAdmin
|
||||
- Path: `/pool/Templates`, Group: `proxmox_user-lldap`, Role: PVEPoolUser
|
||||
- Path: `/pool/Templates`, Group: `proxmox_user-lldap`, Role: PVETemplateUser
|
||||
- The following two rules are based on a default setup of Proxmox, and may need to be updated based on your networking and storage configuration
|
||||
- Path: `/sdn/zones/localnetwork`, Group: `proxmox_user-lldap`, Role: PVESDNUser
|
||||
- Path: `/storage/local-lvm`, Group: `proxmox_user-lldap`, Role: PVEDatastoreUser
|
||||
|
||||
That completes our basic example. The ACL rules in Proxmox are very flexible though, and custom roles can be created as well. The Proxmox documentation on [User Management](https://pve.proxmox.com/wiki/User_Management) goes into more depth if you wish to write a policy that better fits your use case.
|
||||
@@ -1,38 +1,7 @@
|
||||
# Configuration for Seafile
|
||||
Seafile can be bridged to LLDAP directly, or by using Authelia as an intermediary. This document will guide you through both setups.
|
||||
|
||||
## Configuring Seafile v11.0+ to use LLDAP directly
|
||||
Starting with Seafile v11.0:
|
||||
- CCNET module doesn't exist anymore
|
||||
- Authentication in Seafile is now more flexible: the ID binding can differ from the user email, so the LLDAP UID can be used.
|
||||
|
||||
Add the following to your `seafile/conf/seahub_settings.py` :
|
||||
```
|
||||
ENABLE_LDAP = True
|
||||
LDAP_SERVER_URL = 'ldap://192.168.1.100:3890'
|
||||
LDAP_BASE_DN = 'ou=people,dc=example,dc=com'
|
||||
LDAP_ADMIN_DN = 'uid=admin,ou=people,dc=example,dc=com'
|
||||
LDAP_ADMIN_PASSWORD = 'CHANGE_ME'
|
||||
LDAP_PROVIDER = 'ldap'
|
||||
LDAP_LOGIN_ATTR = 'uid'
|
||||
LDAP_CONTACT_EMAIL_ATTR = 'mail'
|
||||
LDAP_USER_ROLE_ATTR = ''
|
||||
LDAP_USER_FIRST_NAME_ATTR = 'givenName'
|
||||
LDAP_USER_LAST_NAME_ATTR = 'sn'
|
||||
LDAP_USER_NAME_REVERSE = False
|
||||
```
|
||||
|
||||
* Replace `192.168.1.100:3890` with your LLDAP server's ip/hostname and port.
|
||||
* Replace every instance of `dc=example,dc=com` with your configured domain.
|
||||
|
||||
After restarting the Seafile server, users should be able to log in with their UID and password.
|
||||
|
||||
Note : There is currently no ldap binding for users' avatar. If interested, do [mention it](https://forum.seafile.com/t/feature-request-avatar-picture-from-ldap/3350/6) to the developers to give more credit to the feature.
|
||||
|
||||
## Configuring Seafile (prior to v11.0) to use LLDAP directly
|
||||
**Note for Seafile before v11:** Seafile's LDAP interface used to require a unique, immutable user identifier in the format of `username@domain`. This no longer applies starting with Seafile v11.0 (see the previous section, Configuring Seafile v11.0+).
|
||||
For Seafile instances prior to v11, since LLDAP does not provide an attribute like `userPrincipalName`, the only attribute that somewhat qualifies is therefore `mail`. However, using `mail` as the user identifier results in the issue that Seafile will treat you as an entirely new user if you change your email address through LLDAP.
|
||||
Seafile's LDAP interface requires a unique, immutable user identifier in the format of `username@domain`. Since LLDAP does not provide an attribute like `userPrincipalName`, the only attribute that somewhat qualifies is therefore `mail`. However, using `mail` as the user identifier results in the issue that Seafile will treat you as an entirely new user if you change your email address through LLDAP. If this is not an issue for you, you can configure LLDAP as an authentication source in Seafile directly. A better but more elaborate way to use Seafile with LLDAP is by using Authelia as an intermediary. This document will guide you through both setups.
|
||||
|
||||
## Configuring Seafile to use LLDAP directly
|
||||
Add the following to your `seafile/conf/ccnet.conf` file:
|
||||
```
|
||||
[LDAP]
|
||||
@@ -117,4 +86,4 @@ OAUTH_ATTRIBUTE_MAP = {
|
||||
}
|
||||
```
|
||||
|
||||
Restart both your Authelia and Seafile server. You should see a "Single Sign-On" button on Seafile's login page. Clicking it should redirect you to Authelia. If you use the [example config for Authelia](authelia_config.yml), you should be able to log in using your LLDAP User ID.
|
||||
Restart both your Authelia and Seafile server. You should see a "Single Sign-On" button on Seafile's login page. Clicking it should redirect you to Authelia. If you use the [example config for Authelia](authelia_config.yml), you should be able to log in using your LLDAP User ID.
|
||||
@@ -1,57 +0,0 @@
|
||||
# Squid
|
||||
|
||||
[Squid](http://www.squid-cache.org/) is a caching HTTP/HTTPS proxy.
|
||||
|
||||
This guide will show you how to configure it to allow any user of the group `proxy` to use the proxy server.
|
||||
|
||||
The configuration file `/etc/squid/squid.conf`
|
||||
```
|
||||
auth_param basic program /usr/lib/squid/basic_ldap_auth -b "dc=example,dc=com" -D "uid=admin,ou=people,dc=example,dc=com" -W /etc/squid/ldap_password -f "(&(memberOf=uid=proxy,ou=groups,dc=example,dc=com)(uid=%s))" -H ldap://IP_OF_LLDAP_SERVER:3890
|
||||
acl localnet src 0.0.0.1-0.255.255.255 # RFC 1122 "this" network (LAN)
|
||||
acl localnet src 10.0.0.0/8 # RFC 1918 local private network (LAN)
|
||||
acl localnet src 100.64.0.0/10 # RFC 6598 shared address space (CGN)
|
||||
acl localnet src 169.254.0.0/16 # RFC 3927 link-local (directly plugged) machines
|
||||
acl localnet src 172.16.0.0/12 # RFC 1918 local private network (LAN)
|
||||
acl localnet src 192.168.0.0/16 # RFC 1918 local private network (LAN)
|
||||
acl localnet src fc00::/7 # RFC 4193 local private network range
|
||||
acl localnet src fe80::/10 # RFC 4291 link-local (directly plugged) machines
|
||||
acl SSL_ports port 443
|
||||
acl Safe_ports port 80 # http
|
||||
acl Safe_ports port 21 # ftp
|
||||
acl Safe_ports port 443 # https
|
||||
acl Safe_ports port 70 # gopher
|
||||
acl Safe_ports port 210 # wais
|
||||
acl Safe_ports port 1025-65535 # unregistered ports
|
||||
acl Safe_ports port 280 # http-mgmt
|
||||
acl Safe_ports port 488 # gss-http
|
||||
acl Safe_ports port 591 # filemaker
|
||||
acl Safe_ports port 777 # multiling http
|
||||
http_access deny !Safe_ports
|
||||
http_access deny CONNECT !SSL_ports
|
||||
http_access allow localhost manager
|
||||
http_access deny manager
|
||||
include /etc/squid/conf.d/*.conf
|
||||
http_access allow localhost
|
||||
acl ldap-auth proxy_auth REQUIRED
|
||||
http_access allow ldap-auth
|
||||
# http_access deny all
|
||||
http_port 3128
|
||||
coredump_dir /var/spool/squid
|
||||
refresh_pattern ^ftp: 1440 20% 10080
|
||||
refresh_pattern ^gopher: 1440 0% 1440
|
||||
refresh_pattern -i (/cgi-bin/|\?) 0 0% 0
|
||||
refresh_pattern \/(Packages|Sources)(|\.bz2|\.gz|\.xz)$ 0 0% 0 refresh-ims
|
||||
refresh_pattern \/Release(|\.gpg)$ 0 0% 0 refresh-ims
|
||||
refresh_pattern \/InRelease$ 0 0% 0 refresh-ims
|
||||
refresh_pattern \/(Translation-.*)(|\.bz2|\.gz|\.xz)$ 0 0% 0 refresh-ims
|
||||
refresh_pattern . 0 20% 4320
|
||||
```
|
||||
The password for the binduser is stored in `/etc/squid/ldap_password` e.g.
|
||||
```
|
||||
PASSWORD_FOR_BINDUSER
|
||||
```
|
||||
|
||||
After you restart squid with `systemctl restart squid` check it is working with
|
||||
```
|
||||
curl -O -L "https://www.redhat.com/index.html" -x "user_name:password@proxy.example.com:3128"
|
||||
```
|
||||
@@ -1,37 +0,0 @@
|
||||
# Tandoor Recipes LDAP configuration
|
||||
|
||||
## LDAP settings are defined by environmental variables as defined in [Tandoor's documentation](https://docs.tandoor.dev/features/authentication/#ldap)
|
||||
|
||||
### #Required#
|
||||
It is recommended to have a read-only account to bind to
|
||||
```
|
||||
LDAP_AUTH=1
|
||||
AUTH_LDAP_SERVER_URI=ldap://lldap:3890/
|
||||
AUTH_LDAP_BIND_DN=uid=ro_admin,ou=people,DC=example,DC=com
|
||||
AUTH_LDAP_BIND_PASSWORD=CHANGEME
|
||||
AUTH_LDAP_USER_SEARCH_BASE_DN=ou=people,DC=example,DC=com
|
||||
```
|
||||
|
||||
### #Optional#
|
||||
|
||||
By default it authenticates everybody identified by the search base DN, this allows you to pull certain users from the ```tandoor_users``` group
|
||||
```
|
||||
AUTH_LDAP_USER_SEARCH_FILTER_STR=(&(&(objectclass=person)(memberOf=cn=tandoor_users,ou=groups,dc=example,dc=com))(uid=%(user)s))
|
||||
```
|
||||
|
||||
Map Tandoor user fields with their LLDAP counterparts
|
||||
```
|
||||
AUTH_LDAP_USER_ATTR_MAP={'first_name': 'givenName', 'last_name': 'sn', 'email': 'mail'}
|
||||
```
|
||||
|
||||
Set whether or not to always update user fields at login and how many seconds for a timeout
|
||||
```
|
||||
AUTH_LDAP_ALWAYS_UPDATE_USER=1
|
||||
AUTH_LDAP_CACHE_TIMEOUT=3600
|
||||
```
|
||||
|
||||
If you use secure LDAP
|
||||
```
|
||||
AUTH_LDAP_START_TLS=1
|
||||
AUTH_LDAP_TLS_CACERTFILE=/etc/ssl/certs/own-ca.pem
|
||||
```
|
||||
@@ -1,43 +0,0 @@
|
||||
# Basic LDAP auth for a The Lounge IRC web-client
|
||||
|
||||
[Main documentation here.](https://thelounge.chat/docs/configuration#ldap-support)
|
||||
|
||||
## Simple Config:
|
||||
|
||||
In this config, The Lounge will use the credentials provided in web ui to authenticate with lldap. It'll allow access if authentication was successful.
|
||||
|
||||
```
|
||||
ldap: {
|
||||
enable: true,
|
||||
url: "ldap://localhost:389",
|
||||
tlsOptions: {},
|
||||
primaryKey: "uid",
|
||||
baseDN: "ou=people,dc=example,dc=com",
|
||||
},
|
||||
```
|
||||
|
||||
|
||||
## Advanced Config:
|
||||
|
||||
`rootDN` is similar to bind DN in other applications. It is used in combination with `rootPassword` to query lldap. `ldap-viewer` user in `lldap` is a member of the group `lldap_strict_readonly` group. This gives `ldap-viewer` user permission to query `lldap`.
|
||||
|
||||
|
||||
With the `filter`, You can limit The Lounge access to users who are a member of the group `thelounge`.
|
||||
|
||||
|
||||
```
|
||||
ldap: {
|
||||
enable: true,
|
||||
url: "ldap://localhost:389",
|
||||
tlsOptions: {},
|
||||
primaryKey: "uid",
|
||||
searchDN: {
|
||||
rootDN: "uid=ldap-viewer,ou=people,dc=example,dc=com",
|
||||
rootPassword: ""
|
||||
filter: "(memberOf=cn=thelounge,ou=groups,dc=example,dc=com)",
|
||||
base: "dc=example,dc=com",
|
||||
scope: "sub",
|
||||
},
|
||||
},
|
||||
```
|
||||
|
||||
@@ -1,16 +0,0 @@
|
||||
<!-- Append at the end of the <entry> sections in traccar.xml -->
|
||||
<entry key='ldap.enable'>true</entry>
|
||||
<!-- Important: the LDAP port must be specified in both ldap.url and ldap.port -->
|
||||
<entry key='ldap.url'>ldap://lldap:3890</entry>
|
||||
<entry key='ldap.port'>3890</entry>
|
||||
<entry key='ldap.user'>UID=admin,OU=people,DC=domain,DC=com</entry>
|
||||
<entry key='ldap.password'>BIND_USER_PASSWORD_HERE</entry>
|
||||
<entry key='ldap.force'>true</entry>
|
||||
<entry key='ldap.base'>OU=people,DC=domain,DC=com</entry>
|
||||
<entry key='ldap.idAttribute'>uid</entry>
|
||||
<entry key='ldap.nameAttribute'>cn</entry>
|
||||
<entry key='ldap.mailAttribute'>mail</entry>
|
||||
<!-- Only allow users belonging to group 'traccar' to login -->
|
||||
<entry key='ldap.searchFilter'>(&(|(uid=:login)(mail=:login))(memberOf=cn=traccar,ou=groups,dc=domain,dc=com))</entry>
|
||||
<!-- Make new users administrators if they belong to group 'lldap_admin' -->
|
||||
<entry key='ldap.adminFilter'>(&(|(uid=:login)(mail=:login))(memberOf=cn=lldap_admin,ou=groups,dc=domain,dc=com))</entry>
|
||||
@@ -49,7 +49,7 @@ mail
|
||||
```
|
||||
### Display Name Field Mapping
|
||||
```
|
||||
cn
|
||||
givenname
|
||||
```
|
||||
### Avatar Picture Field Mapping
|
||||
```
|
||||
|
||||
@@ -1,56 +0,0 @@
|
||||
# Zabbix Web Configuration
|
||||
|
||||
This example is for the Zabbix Web interface version 6.0, which is the supported LTS version as of August 2023. Later versions have additional options.
|
||||
|
||||
For the associated 6.0 documentation see [here](https://www.zabbix.com/documentation/6.0/en/manual/web_interface/frontend_sections/administration/authentication) and for the current manual see [here](https://www.zabbix.com/documentation/current/en/manual).
|
||||
|
||||
***Note that an LDAP user must exist in Zabbix Web as well, however its Zabbix password will not be used.*** When creating the user in Zabbix, the user should also be added to your desired Zabbix roles/groups.
|
||||
|
||||
## Configure LDAP Settings
|
||||
|
||||
- Log in to the web interface as an admin
|
||||
- Navigate to `Administration > Authentication > LDAP Settings`
|
||||
|
||||
### Enable LDAP authentication
|
||||
|
||||
Checked
|
||||
|
||||
### LDAP host
|
||||
|
||||
URI of your LLDAP host. Example: `ldap://ldap.example.com:3890` or `ldaps://ldap.example.com:6360` for TLS.
|
||||
|
||||
### Port
|
||||
|
||||
Not used when using a full LDAP URI as above, but feel free to put `3890` or `6360` for TLS.
|
||||
|
||||
### Base DN
|
||||
|
||||
Your LLDAP_LDAP_BASE. Example: `dc=example,dc=com`
|
||||
|
||||
### Search attribute
|
||||
|
||||
`uid`
|
||||
|
||||
### Case-sensitive login
|
||||
|
||||
Checked
|
||||
|
||||
### Bind DN
|
||||
|
||||
`uid=admin,ou=people,dc=example,dc=com`
|
||||
|
||||
Alternatively, it is recommended that you create a separate user account (e.g., `bind_user`) instead of `admin` for sharing Bind credentials with other services. The `bind_user` should be a member of the `lldap_strict_readonly` group to limit access to your LDAP configuration in LLDAP.
|
||||
|
||||
### Bind password
|
||||
|
||||
Password for the above bind DN user.
|
||||
|
||||
### Test authentication
|
||||
|
||||
The test authentication `Login` and `User password` must be used to check the connection and whether an LDAP user can be successfully authenticated. Zabbix will not activate LDAP authentication if it is unable to authenticate the test user.
|
||||
|
||||
## Enable LDAP in Zabbix Web
|
||||
|
||||
- Navigate to `Administration > Authentication > Authentication` (the first tab)
|
||||
- Set "Default authentication" to "LDAP"
|
||||
- Click "Update"
|
||||
@@ -1,51 +0,0 @@
|
||||
# Configuration for Zitadel
|
||||
In Zitadel, go to `Instance > Settings` for instance-wide LDAP setup or `<Organization Name> > Settings` for organization-wide LDAP setup.
|
||||
|
||||
## Identity Providers Setup
|
||||
Click `Identity Providers` and select `Active Directory/LDAP`.
|
||||
|
||||
**Group filter is not supported in `Zitadel` at the time of writing.**
|
||||
|
||||
Replace every instance of `dc=example,dc=com` with your configured domain.
|
||||
### Connection
|
||||
* Name: The name to identify your identity provider
|
||||
* Servers: `ldaps://<FQDN or Host IP>:<Port for LDAPS>` or `ldap://<FQDN or Host IP>:<Port for LDAP>`
|
||||
* BaseDn: `dc=example,dc=com`
|
||||
* BindDn: `cn=admin,ou=people,dc=example,dc=com`. It is recommended that you create a separate user account (e.g., `bind_user`) instead of `admin` for sharing Bind credentials with other services. The `bind_user` should be a member of the `lldap_strict_readonly` group to limit access to your LDAP configuration in LLDAP.
|
||||
* Bind Password: `<user password>`
|
||||
|
||||
### User binding
|
||||
* Userbase: `dn`
|
||||
* User filters: `uid`. `mail` will not work.
|
||||
* User Object Classes: `person`
|
||||
|
||||
### LDAP Attributes
|
||||
* ID attribute: `uid`
|
||||
* displayName attribute: `cn`
|
||||
* Email attribute: `mail`
|
||||
* Given name attribute: `givenName`
|
||||
* Family name attribute: `lastName`
|
||||
* Preferred username attribute: `uid`
|
||||
|
||||
### optional
|
||||
The following section applies to `Zitadel` only; nothing will change on the `LLDAP` side.
|
||||
|
||||
* Account creation allowed [x]
|
||||
* Account linking allowed [x]
|
||||
|
||||
**Either one of them or both of them must be enabled**
|
||||
|
||||
**DO NOT** enable `Automatic update` if you haven't set up an SMTP server. Zitadel will update the account's email and send a verification code to verify the address.
|
||||
If you don't have an SMTP server set up correctly and the email address of `ZITADEL Admin` is changed, you are **permanently** locked out.
|
||||
|
||||
`Automatic creation` can automatically create a new account without user interaction when `Given name attribute`, `Family name attribute`, `Email attribute`, and `Preferred username attribute` are present.
|
||||
|
||||
## Enable Identity Provider
|
||||
After clicking `Save`, you will be redirected to `Identity Providers` page.
|
||||
|
||||
Enable the LDAP provider by hovering over the item and clicking the checkmark (`set as available`)
|
||||
|
||||
## Enable LDAP Login
|
||||
Under `Settings`, select `Login Behavior and Security`
|
||||
|
||||
Under `Advanced`, enable `External IDP allowed`
|
||||
@@ -1,143 +0,0 @@
|
||||
# Configuration for Zulip
|
||||
|
||||
Zulip combines the immediacy of real-time chat with an email threading model.
|
||||
|
||||
Their ldap-documentation is here: [zulip.readthedocs.io](https://zulip.readthedocs.io/en/stable/production/authentication-methods.html#ldap-including-active-directory)
|
||||
|
||||
Zulip has two installation methods, either by running the recommended installer or by docker/podman compose.
|
||||
The way how the service is configured differs depending on the installation method, so keep in mind you will only need one of the following examples.
|
||||
|
||||
> Important info
|
||||
> The available/configured userdata will be automatically imported at the first login.
|
||||
> If you want to import it before the user logs in for the first time or
|
||||
> if you want to keep the data in sync with LLDAP you need to trigger the import by hand (or via cronjob).
|
||||
> `/home/zulip/deployments/current/manage.py sync_ldap_user_data`
|
||||
|
||||
## Container based configuration
|
||||
The following configuration takes place in the environment section of your compose-file.
|
||||
|
||||
1) Enable the LDAP authentication backend
|
||||
Find the line`ZULIP_AUTH_BACKENDS: "EmailAuthBackend"` and change it to `ZULIP_AUTH_BACKENDS: "ZulipLDAPAuthBackend,EmailAuthBackend"`.
|
||||
|
||||
2) Configure how to connect with LLDAP
|
||||
The user specified in `SETTING_AUTH_LDAP_BIND_DN` is used to query data from LLDAP.
|
||||
Zulip is only able to authenticate users and read data via LDAP; it is not able to write data or change the user's password.
|
||||
Because of this limitation we will use the group `lldap_strict_readonly` for this user.
|
||||
Add the following lines to your configuration and change the values according to your setup.
|
||||
```
|
||||
SETTING_AUTH_LDAP_SERVER_URI: "ldap://lldap:3890"
|
||||
SETTING_AUTH_LDAP_BIND_DN: "uid=zulip,ou=people,dc=example,dc=com"
|
||||
SECRETS_auth_ldap_bind_password: "superSECURE_Pa55word"
|
||||
```
|
||||
|
||||
3) Configure how to search for existing users
|
||||
Add the following lines to your configuration and change the values according to your setup.
|
||||
```
|
||||
SETTING_AUTH_LDAP_USER_SEARCH: >
|
||||
LDAPSearch("ou=people,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(uid=%(user)s)")
|
||||
SETTING_LDAP_EMAIL_ATTR: mail
|
||||
SETTING_AUTH_LDAP_REVERSE_EMAIL_SEARCH: >
|
||||
LDAPSearch("ou=people,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(mail=%(email)s)")
|
||||
SETTING_AUTH_LDAP_USERNAME_ATTR: "uid"
|
||||
```
|
||||
|
||||
4) Configure the user-data mapping
|
||||
This step is optional, the sample below shows the maximum of available options, you can use all of them or none.
|
||||
Add the following lines to your configuration and remove the fields you don't want to be synced.
|
||||
The field `"full_name": "cn"` is mandatory.
|
||||
```
|
||||
SETTING_AUTH_LDAP_USER_ATTR_MAP: >
|
||||
{"full_name": "cn","first_name": "givenName","last_name": "sn","avatar": "jpegPhoto"}
|
||||
```
|
||||
|
||||
5) Configure which groups are allowed to authenticate
|
||||
This step is optional, if you do not specify anything here all users from your LLDAP server will be able to login.
|
||||
This example will grant access to all users who are a member of `zulip_users`.
|
||||
Add the following lines to your configuration and change the values according to your setup.
|
||||
```
|
||||
ZULIP_CUSTOM_SETTINGS: "import django_auth_ldap"
|
||||
SETTING_AUTH_LDAP_GROUP_TYPE: "django_auth_ldap.config.GroupOfUniqueNamesType(name_attr='cn')"
|
||||
SETTING_AUTH_LDAP_REQUIRE_GROUP: "cn=zulip_users,ou=groups,dc=example,dc=com"
|
||||
SETTING_AUTH_LDAP_GROUP_SEARCH: >
|
||||
LDAPSearch("ou=groups,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(objectClass=GroupOfUniqueNames)")
|
||||
```
|
||||
|
||||
6) Disallow local changes after importing userdata
|
||||
This step is optional; you may want to disallow users from changing their name and avatar if you import this data via LDAP.
|
||||
Add the following lines to your configuration and change the values according to your setup.
|
||||
```
|
||||
SETTING_NAME_CHANGES_DISABLED: True
|
||||
SETTING_AVATAR_CHANGES_DISABLED: True
|
||||
```
|
||||
> Important Info
|
||||
> Zulip will not write the user profile back to your ldap server.
|
||||
> If the user changes their profile in Zulip, those changes will be overwritten when the next synchronization with LLDAP is triggered.
|
||||
> Allow changes to the user profile only if you do not plan to synchronize it with LLDAP regularly.
|
||||
|
||||
|
||||
|
||||
## Installer based configuration
|
||||
The following configuration takes place in the configuration-file `/etc/zulip/settings.py`.
|
||||
|
||||
1) Enable the LDAP authentication backend
|
||||
Find the line `AUTHENTICATION_BACKENDS` and uncomment `"zproject.backends.ZulipLDAPAuthBackend"`.
|
||||
|
||||
2) Configure how to connect with LLDAP
|
||||
The user specified in `AUTH_LDAP_BIND_DN` is used to query data from LLDAP.
|
||||
Zulip is only able to authenticate users and read data via LDAP; it is not able to write data or change the user's password.
|
||||
Because of this limitation we will use the group `lldap_strict_readonly` for this user.
|
||||
Uncomment the following lines in your configuration and change the values according to your setup.
|
||||
```
|
||||
AUTH_LDAP_SERVER_URI = "ldap://lldap:3890"
|
||||
AUTH_LDAP_BIND_DN = "uid=zulip,ou=people,dc=example,dc=com"
|
||||
```
|
||||
|
||||
The password corresponding to AUTH_LDAP_BIND_DN goes in `/etc/zulip/zulip-secrets.conf`.
|
||||
Add a single new line to that file like below.
|
||||
```
|
||||
auth_ldap_bind_password = superSECURE_Pa55word
|
||||
```
|
||||
|
||||
3) Configure how to search for existing users
|
||||
Uncomment the following lines in your configuration and change the values according to your setup.
|
||||
```
|
||||
AUTH_LDAP_USER_SEARCH = LDAPSearch("ou=people,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(uid=%(user)s)")
|
||||
LDAP_EMAIL_ATTR = mail
|
||||
AUTH_LDAP_REVERSE_EMAIL_SEARCH = LDAPSearch("ou=people,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(mail=%(email)s)")
|
||||
AUTH_LDAP_USERNAME_ATTR = "uid"
|
||||
```
|
||||
|
||||
4) Configure the user-data mapping
|
||||
This step is optional, the sample below shows the maximum of available options, you can use all of them or none.
|
||||
Find the line `AUTH_LDAP_USER_ATTR_MAP`, then uncomment the values you want to map and change the values according to your setup.
|
||||
```
|
||||
AUTH_LDAP_USER_ATTR_MAP = {
|
||||
"full_name": "cn",
|
||||
"first_name": "givenName",
|
||||
"last_name": "sn",
|
||||
"avatar": "jpegPhoto",
|
||||
}
|
||||
```
|
||||
|
||||
5) Configure which groups are allowed to authenticate
|
||||
This step is optional, if you do not specify anything here all users from your LLDAP server will be able to login.
|
||||
This example will grant access to all users who are a member of `zulip_users`.
|
||||
Add the following lines to your configuration and change the values according to your setup.
|
||||
```
|
||||
import django_auth_ldap
|
||||
AUTH_LDAP_GROUP_TYPE = "django_auth_ldap.config.GroupOfUniqueNamesType(name_attr='cn')"
|
||||
AUTH_LDAP_REQUIRE_GROUP = "cn=zulip_users,ou=groups,dc=example,dc=com"
|
||||
AUTH_LDAP_GROUP_SEARCH = LDAPSearch("ou=groups,dc=example,dc=com", ldap.SCOPE_SUBTREE, "(objectClass=GroupOfUniqueNames)")
|
||||
```
|
||||
|
||||
6) Disallow local changes after importing userdata
|
||||
This step is optional; you may want to disallow users from changing their name and avatar if you import this data via LDAP.
|
||||
Uncomment the following lines in your configuration and change the values according to your setup.
|
||||
```
|
||||
NAME_CHANGES_DISABLED: True
|
||||
AVATAR_CHANGES_DISABLED: True
|
||||
```
|
||||
> Important Info
|
||||
> Zulip will not write the user profile back to your ldap server.
|
||||
> If the user changes their profile in Zulip, those changes will be overwritten when the next synchronization with LLDAP is triggered.
|
||||
> Allow changes to the user profile only if you do not plan to synchronize it with LLDAP regularly.
|
||||
@@ -1,12 +0,0 @@
|
||||
#! /bin/sh
|
||||
|
||||
function print_random () {
|
||||
LC_ALL=C tr -dc 'A-Za-z0-9!#%&()*+,-./:;<=>?@[\]^_{|}~' </dev/urandom | head -c 32
|
||||
}
|
||||
|
||||
/bin/echo -n "LLDAP_JWT_SECRET='"
|
||||
print_random
|
||||
echo "'"
|
||||
/bin/echo -n "LLDAP_KEY_SEED='"
|
||||
print_random
|
||||
echo "'"
|
||||
@@ -10,9 +10,7 @@
|
||||
## The host address that the LDAP server will be bound to.
|
||||
## To enable IPv6 support, simply switch "ldap_host" to "::":
|
||||
## To only allow connections from localhost (if you want to restrict to local self-hosted services),
|
||||
## change it to "127.0.0.1" ("::1" in case of IPv6).
|
||||
## If LLDAP server is running in docker, set it to "0.0.0.0" ("::" for IPv6) to allow connections
|
||||
## originating from outside the container.
|
||||
## change it to "127.0.0.1" ("::1" in case of IPv6)".
|
||||
#ldap_host = "0.0.0.0"
|
||||
|
||||
## The port on which to have the LDAP server.
|
||||
@@ -21,9 +19,7 @@
|
||||
## The host address that the HTTP server will be bound to.
|
||||
## To enable IPv6 support, simply switch "http_host" to "::".
|
||||
## To only allow connections from localhost (if you want to restrict to local self-hosted services),
|
||||
## change it to "127.0.0.1" ("::1" in case of IPv6).
|
||||
## If LLDAP server is running in docker, set it to "0.0.0.0" ("::" for IPv6) to allow connections
|
||||
## originating from outside the container.
|
||||
## change it to "127.0.0.1" ("::1" in case of IPv6)".
|
||||
#http_host = "0.0.0.0"
|
||||
|
||||
## The port on which to have the HTTP server, for user login and
|
||||
@@ -78,12 +74,6 @@
|
||||
## is just the default one.
|
||||
#ldap_user_pass = "REPLACE_WITH_PASSWORD"
|
||||
|
||||
## Force reset of the admin password.
|
||||
## Break glass in case of emergency: if you lost the admin password, you
|
||||
## can set this to true to force a reset of the admin password to the value
|
||||
## of ldap_user_pass above.
|
||||
# force_reset_admin_password = false
|
||||
|
||||
## Database URL.
|
||||
## This encodes the type of database (SQlite, MySQL, or PostgreSQL)
|
||||
## , the path, the user, password, and sometimes the mode (when
|
||||
@@ -98,20 +88,21 @@
|
||||
database_url = "sqlite:///data/users.db?mode=rwc"
|
||||
|
||||
## Private key file.
|
||||
## Not recommended, use key_seed instead.
|
||||
## Contains the secret private key used to store the passwords safely.
|
||||
## Note that even with a database dump and the private key, an attacker
|
||||
## would still have to perform an (expensive) brute force attack to find
|
||||
## each password.
|
||||
## Randomly generated on first run if it doesn't exist.
|
||||
## Alternatively, you can use key_seed to override this instead of relying on
|
||||
## a file.
|
||||
## Env variable: LLDAP_KEY_FILE
|
||||
#key_file = "/data/private_key"
|
||||
key_file = "/data/private_key"
|
||||
|
||||
## Seed to generate the server private key, see key_file above.
|
||||
## This can be any random string, the recommendation is that it's at least 12
|
||||
## characters long.
|
||||
## Env variable: LLDAP_KEY_SEED
|
||||
key_seed = "RanD0m STR1ng"
|
||||
#key_seed = "RanD0m STR1ng"
|
||||
|
||||
## Ignored attributes.
|
||||
## Some services will request attributes that are not present in LLDAP. When it
|
||||
|
||||
@@ -9,16 +9,16 @@ repository = "https://github.com/lldap/lldap"
|
||||
version = "0.4.2"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "*"
|
||||
anyhow = "1"
|
||||
base64 = "0.13"
|
||||
rand = "0.8"
|
||||
requestty = "0.4.1"
|
||||
serde = "1"
|
||||
serde_json = "1"
|
||||
smallvec = "*"
|
||||
smallvec = "1"
|
||||
|
||||
[dependencies.lldap_auth]
|
||||
path = "../auth"
|
||||
version = "0.3"
|
||||
features = ["opaque_client"]
|
||||
|
||||
[dependencies.graphql_client]
|
||||
@@ -27,11 +27,11 @@ default-features = false
|
||||
version = "0.11"
|
||||
|
||||
[dependencies.reqwest]
|
||||
version = "*"
|
||||
version = "0.11"
|
||||
default-features = false
|
||||
features = ["json", "blocking", "rustls-tls"]
|
||||
|
||||
[dependencies.ldap3]
|
||||
version = "*"
|
||||
version = "0.11"
|
||||
default-features = false
|
||||
features = ["sync", "tls-rustls"]
|
||||
|
||||
@@ -194,7 +194,6 @@ impl TryFrom<ResultEntry> for User {
|
||||
first_name,
|
||||
last_name,
|
||||
avatar: avatar.map(base64::encode),
|
||||
attributes: None,
|
||||
},
|
||||
password,
|
||||
entry.dn,
|
||||
@@ -272,7 +271,7 @@ pub fn get_users(connection: &mut LdapClient) -> Result<Vec<User>, anyhow::Error
|
||||
.default(maybe_user_ou.unwrap_or_default())
|
||||
.auto_complete(|s, _| {
|
||||
let mut answers = autocomplete_domain_suffix(s, domain);
|
||||
answers.extend(all_ous.clone());
|
||||
answers.extend(all_ous.clone().into_iter());
|
||||
answers
|
||||
})
|
||||
.build();
|
||||
@@ -384,7 +383,7 @@ pub fn get_groups(connection: &mut LdapClient) -> Result<Vec<LdapGroup>> {
|
||||
.default(maybe_group_ou.unwrap_or_default())
|
||||
.auto_complete(|s, _| {
|
||||
let mut answers = autocomplete_domain_suffix(s, domain);
|
||||
answers.extend(all_ous.clone());
|
||||
answers.extend(all_ous.clone().into_iter());
|
||||
answers
|
||||
})
|
||||
.build();
|
||||
|
||||
@@ -136,7 +136,7 @@ fn try_login(
|
||||
let ClientLoginStartResult { state, message } =
|
||||
start_login(password, &mut rng).context("Could not initialize login")?;
|
||||
let req = ClientLoginStartRequest {
|
||||
username: username.into(),
|
||||
username: username.to_owned(),
|
||||
login_start_request: message,
|
||||
};
|
||||
let response = client
|
||||
|
||||
117
schema.graphql
generated
117
schema.graphql
generated
@@ -1,22 +1,17 @@
|
||||
type AttributeValue {
|
||||
name: String!
|
||||
value: [String!]!
|
||||
input EqualityConstraint {
|
||||
field: String!
|
||||
value: String!
|
||||
}
|
||||
|
||||
type Mutation {
|
||||
createUser(user: CreateUserInput!): User!
|
||||
createGroup(name: String!): Group!
|
||||
createGroupWithDetails(request: CreateGroupInput!): Group!
|
||||
updateUser(user: UpdateUserInput!): Success!
|
||||
updateGroup(group: UpdateGroupInput!): Success!
|
||||
addUserToGroup(userId: String!, groupId: Int!): Success!
|
||||
removeUserFromGroup(userId: String!, groupId: Int!): Success!
|
||||
deleteUser(userId: String!): Success!
|
||||
deleteGroup(groupId: Int!): Success!
|
||||
addUserAttribute(name: String!, attributeType: AttributeType!, isList: Boolean!, isVisible: Boolean!, isEditable: Boolean!): Success!
|
||||
addGroupAttribute(name: String!, attributeType: AttributeType!, isList: Boolean!, isVisible: Boolean!, isEditable: Boolean!): Success!
|
||||
deleteUserAttribute(name: String!): Success!
|
||||
deleteGroupAttribute(name: String!): Success!
|
||||
}
|
||||
|
||||
type Group {
|
||||
@@ -24,8 +19,6 @@ type Group {
|
||||
displayName: String!
|
||||
creationDate: DateTimeUtc!
|
||||
uuid: String!
|
||||
"User-defined attributes."
|
||||
attributes: [AttributeValue!]!
|
||||
"The groups to which this user belongs."
|
||||
users: [User!]!
|
||||
}
|
||||
@@ -46,13 +39,18 @@ input RequestFilter {
|
||||
"DateTime"
|
||||
scalar DateTimeUtc
|
||||
|
||||
"The fields that can be updated for a group."
|
||||
input UpdateGroupInput {
|
||||
id: Int!
|
||||
displayName: String
|
||||
}
|
||||
|
||||
type Query {
|
||||
apiVersion: String!
|
||||
user(userId: String!): User!
|
||||
users(filters: RequestFilter): [User!]!
|
||||
groups: [Group!]!
|
||||
group(groupId: Int!): Group!
|
||||
schema: Schema!
|
||||
}
|
||||
|
||||
"The details required to create a user."
|
||||
@@ -62,79 +60,7 @@ input CreateUserInput {
|
||||
displayName: String
|
||||
firstName: String
|
||||
lastName: String
|
||||
"Base64 encoded JpegPhoto." avatar: String
|
||||
"User-defined attributes." attributes: [AttributeValueInput!]
|
||||
}
|
||||
|
||||
type AttributeSchema {
|
||||
name: String!
|
||||
attributeType: AttributeType!
|
||||
isList: Boolean!
|
||||
isVisible: Boolean!
|
||||
isEditable: Boolean!
|
||||
isHardcoded: Boolean!
|
||||
}
|
||||
|
||||
"The fields that can be updated for a user."
|
||||
input UpdateUserInput {
|
||||
id: String!
|
||||
email: String
|
||||
displayName: String
|
||||
firstName: String
|
||||
lastName: String
|
||||
"Base64 encoded JpegPhoto." avatar: String
|
||||
"""
|
||||
Attribute names to remove.
|
||||
They are processed before insertions.
|
||||
""" removeAttributes: [String!]
|
||||
"""
|
||||
Inserts or updates the given attributes.
|
||||
For lists, the entire list must be provided.
|
||||
""" insertAttributes: [AttributeValueInput!]
|
||||
}
|
||||
|
||||
input EqualityConstraint {
|
||||
field: String!
|
||||
value: String!
|
||||
}
|
||||
|
||||
type Schema {
|
||||
userSchema: AttributeList!
|
||||
groupSchema: AttributeList!
|
||||
}
|
||||
|
||||
"The fields that can be updated for a group."
|
||||
input UpdateGroupInput {
|
||||
"The group ID." id: Int!
|
||||
"The new display name." displayName: String
|
||||
"""
|
||||
Attribute names to remove.
|
||||
They are processed before insertions.
|
||||
""" removeAttributes: [String!]
|
||||
"""
|
||||
Inserts or updates the given attributes.
|
||||
For lists, the entire list must be provided.
|
||||
""" insertAttributes: [AttributeValueInput!]
|
||||
}
|
||||
|
||||
input AttributeValueInput {
|
||||
"""
|
||||
The name of the attribute. It must be present in the schema, and the type informs how
|
||||
to interpret the values.
|
||||
""" name: String!
|
||||
"""
|
||||
The values of the attribute.
|
||||
If the attribute is not a list, the vector must contain exactly one element.
|
||||
Integers (signed 64 bits) are represented as strings.
|
||||
Dates are represented as strings in RFC3339 format, e.g. "2019-10-12T07:20:50.52Z".
|
||||
JpegPhotos are represented as base64 encoded strings. They must be valid JPEGs.
|
||||
""" value: [String!]!
|
||||
}
|
||||
|
||||
"The details required to create a group."
|
||||
input CreateGroupInput {
|
||||
displayName: String!
|
||||
"User-defined attributes." attributes: [AttributeValueInput!]
|
||||
avatar: String
|
||||
}
|
||||
|
||||
type User {
|
||||
@@ -146,27 +72,24 @@ type User {
|
||||
avatar: String
|
||||
creationDate: DateTimeUtc!
|
||||
uuid: String!
|
||||
"User-defined attributes."
|
||||
attributes: [AttributeValue!]!
|
||||
"The groups to which this user belongs."
|
||||
groups: [Group!]!
|
||||
}
|
||||
|
||||
type AttributeList {
|
||||
attributes: [AttributeSchema!]!
|
||||
}
|
||||
|
||||
enum AttributeType {
|
||||
STRING
|
||||
INTEGER
|
||||
JPEG_PHOTO
|
||||
DATE_TIME
|
||||
}
|
||||
|
||||
type Success {
|
||||
ok: Boolean!
|
||||
}
|
||||
|
||||
"The fields that can be updated for a user."
|
||||
input UpdateUserInput {
|
||||
id: String!
|
||||
email: String
|
||||
displayName: String
|
||||
firstName: String
|
||||
lastName: String
|
||||
avatar: String
|
||||
}
|
||||
|
||||
schema {
|
||||
query: Query
|
||||
mutation: Mutation
|
||||
|
||||
@@ -1,15 +1,9 @@
|
||||
#! /bin/bash
|
||||
|
||||
tables=("users" "groups" "memberships" "jwt_refresh_storage" "jwt_storage" "password_reset_tokens" "group_attribute_schema" "group_attributes")
|
||||
tables=("users" "groups" "memberships" "jwt_refresh_storage" "jwt_storage" "password_reset_tokens")
|
||||
echo ".header on"
|
||||
|
||||
for table in ${tables[@]}; do
|
||||
echo ".mode insert $table"
|
||||
echo "select * from $table;"
|
||||
done
|
||||
|
||||
echo ".mode insert user_attribute_schema"
|
||||
echo "select * from user_attribute_schema where user_attribute_schema_name not in ('first_name', 'last_name', 'avatar');"
|
||||
|
||||
echo ".mode insert user_attributes"
|
||||
echo "select * from user_attributes;"
|
||||
done
|
||||
@@ -8,7 +8,7 @@ keywords = ["cli", "ldap", "graphql", "server", "authentication"]
|
||||
license = "GPL-3.0-only"
|
||||
name = "lldap"
|
||||
repository = "https://github.com/lldap/lldap"
|
||||
version = "0.5.1-alpha"
|
||||
version = "0.5.0-alpha"
|
||||
|
||||
[dependencies]
|
||||
actix = "0.13"
|
||||
@@ -19,46 +19,45 @@ actix-server = "2"
|
||||
actix-service = "2"
|
||||
actix-web = "4.3"
|
||||
actix-web-httpauth = "0.8"
|
||||
anyhow = "*"
|
||||
anyhow = "1"
|
||||
async-trait = "0.1"
|
||||
base64 = "0.21"
|
||||
bincode = "1.3"
|
||||
cron = "*"
|
||||
cron = "0.12"
|
||||
derive_builder = "0.12"
|
||||
derive_more = "0.99"
|
||||
figment_file_provider_adapter = "0.1"
|
||||
futures = "*"
|
||||
futures-util = "*"
|
||||
futures = "0.3"
|
||||
futures-util = "0.3"
|
||||
hmac = "0.12"
|
||||
http = "*"
|
||||
http = "0.2"
|
||||
itertools = "0.10"
|
||||
juniper = "0.15"
|
||||
jwt = "0.16"
|
||||
lber = "0.4.1"
|
||||
ldap3_proto = "^0.4.3"
|
||||
log = "*"
|
||||
ldap3_proto = ">=0.3.1"
|
||||
log = "0.4"
|
||||
orion = "0.17"
|
||||
rand_chacha = "0.3"
|
||||
rustls-pemfile = "1"
|
||||
serde = "*"
|
||||
serde = "1"
|
||||
serde_bytes = "0.11"
|
||||
serde_json = "1"
|
||||
sha2 = "0.10"
|
||||
thiserror = "*"
|
||||
thiserror = "1"
|
||||
time = "0.3"
|
||||
tokio-rustls = "0.23"
|
||||
tokio-stream = "*"
|
||||
tokio-stream = "0.1"
|
||||
tokio-util = "0.7"
|
||||
tracing = "*"
|
||||
tracing = "0.1"
|
||||
tracing-actix-web = "0.7"
|
||||
tracing-attributes = "^0.1.21"
|
||||
tracing-log = "*"
|
||||
tracing-log = "0.1"
|
||||
urlencoding = "2"
|
||||
webpki-roots = "0.22.2"
|
||||
webpki-roots = "0.23"
|
||||
|
||||
[dependencies.chrono]
|
||||
features = ["serde"]
|
||||
version = "*"
|
||||
version = "0.4"
|
||||
|
||||
[dependencies.clap]
|
||||
features = ["std", "color", "suggestions", "derive", "env"]
|
||||
@@ -66,7 +65,7 @@ version = "4"
|
||||
|
||||
[dependencies.figment]
|
||||
features = ["env", "toml"]
|
||||
version = "*"
|
||||
version = "0.10"
|
||||
|
||||
[dependencies.tracing-subscriber]
|
||||
version = "0.3"
|
||||
@@ -78,8 +77,7 @@ default-features = false
|
||||
version = "0.10.1"
|
||||
|
||||
[dependencies.lldap_auth]
|
||||
path = "../auth"
|
||||
features = ["opaque_server", "opaque_client", "sea_orm"]
|
||||
version = "0.3"
|
||||
|
||||
[dependencies.opaque-ke]
|
||||
version = "0.6"
|
||||
@@ -90,23 +88,19 @@ version = "0.8"
|
||||
|
||||
[dependencies.secstr]
|
||||
features = ["serde"]
|
||||
version = "*"
|
||||
|
||||
[dependencies.strum]
|
||||
features = ["derive"]
|
||||
version = "0.25"
|
||||
version = "0.5"
|
||||
|
||||
[dependencies.tokio]
|
||||
features = ["full"]
|
||||
version = "1.25"
|
||||
|
||||
[dependencies.uuid]
|
||||
features = ["v1", "v3"]
|
||||
features = ["v3"]
|
||||
version = "1"
|
||||
|
||||
[dependencies.tracing-forest]
|
||||
features = ["smallvec", "chrono", "tokio"]
|
||||
version = "^0.1.6"
|
||||
version = "^0.1.4"
|
||||
|
||||
[dependencies.actix-tls]
|
||||
features = ["default", "rustls"]
|
||||
@@ -118,7 +112,7 @@ default-features = false
|
||||
version = "0.24"
|
||||
|
||||
[dependencies.sea-orm]
|
||||
version= "0.12"
|
||||
version= "0.11"
|
||||
default-features = false
|
||||
features = ["macros", "with-chrono", "with-uuid", "sqlx-all", "runtime-actix-rustls"]
|
||||
|
||||
@@ -131,15 +125,10 @@ features = ["rustls-tls-webpki-roots"]
|
||||
version = "0.20"
|
||||
features = ["dangerous_configuration"]
|
||||
|
||||
[dependencies.url]
|
||||
version = "2"
|
||||
features = ["serde"]
|
||||
|
||||
[dev-dependencies]
|
||||
assert_cmd = "2.0"
|
||||
mockall = "0.11.4"
|
||||
mockall = "0.11"
|
||||
nix = "0.26.2"
|
||||
pretty_assertions = "1"
|
||||
|
||||
[dev-dependencies.graphql_client]
|
||||
features = ["graphql_query_derive", "reqwest-rustls"]
|
||||
@@ -147,12 +136,12 @@ default-features = false
|
||||
version = "0.11"
|
||||
|
||||
[dev-dependencies.ldap3]
|
||||
version = "*"
|
||||
version = "0.11"
|
||||
default-features = false
|
||||
features = ["sync", "tls-rustls"]
|
||||
|
||||
[dev-dependencies.reqwest]
|
||||
version = "*"
|
||||
version = "0.11"
|
||||
default-features = false
|
||||
features = ["json", "blocking", "rustls-tls"]
|
||||
|
||||
@@ -164,7 +153,3 @@ features = ["file_locks"]
|
||||
[dev-dependencies.uuid]
|
||||
version = "1"
|
||||
features = ["v4"]
|
||||
|
||||
[dev-dependencies.figment]
|
||||
features = ["test"]
|
||||
version = "*"
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
use crate::domain::types::{AttributeType, JpegPhoto, Serialized};
|
||||
use anyhow::{bail, Context as AnyhowContext};
|
||||
|
||||
pub fn deserialize_attribute_value(
|
||||
value: &[String],
|
||||
typ: AttributeType,
|
||||
is_list: bool,
|
||||
) -> anyhow::Result<Serialized> {
|
||||
if !is_list && value.len() != 1 {
|
||||
bail!("Attribute is not a list, but multiple values were provided",);
|
||||
}
|
||||
let parse_int = |value: &String| -> anyhow::Result<i64> {
|
||||
value
|
||||
.parse::<i64>()
|
||||
.with_context(|| format!("Invalid integer value {}", value))
|
||||
};
|
||||
let parse_date = |value: &String| -> anyhow::Result<chrono::NaiveDateTime> {
|
||||
Ok(chrono::DateTime::parse_from_rfc3339(value)
|
||||
.with_context(|| format!("Invalid date value {}", value))?
|
||||
.naive_utc())
|
||||
};
|
||||
let parse_photo = |value: &String| -> anyhow::Result<JpegPhoto> {
|
||||
JpegPhoto::try_from(value.as_str()).context("Provided image is not a valid JPEG")
|
||||
};
|
||||
Ok(match (typ, is_list) {
|
||||
(AttributeType::String, false) => Serialized::from(&value[0]),
|
||||
(AttributeType::String, true) => Serialized::from(&value),
|
||||
(AttributeType::Integer, false) => Serialized::from(&parse_int(&value[0])?),
|
||||
(AttributeType::Integer, true) => Serialized::from(
|
||||
&value
|
||||
.iter()
|
||||
.map(parse_int)
|
||||
.collect::<anyhow::Result<Vec<_>>>()?,
|
||||
),
|
||||
(AttributeType::DateTime, false) => Serialized::from(&parse_date(&value[0])?),
|
||||
(AttributeType::DateTime, true) => Serialized::from(
|
||||
&value
|
||||
.iter()
|
||||
.map(parse_date)
|
||||
.collect::<anyhow::Result<Vec<_>>>()?,
|
||||
),
|
||||
(AttributeType::JpegPhoto, false) => Serialized::from(&parse_photo(&value[0])?),
|
||||
(AttributeType::JpegPhoto, true) => Serialized::from(
|
||||
&value
|
||||
.iter()
|
||||
.map(parse_photo)
|
||||
.collect::<anyhow::Result<Vec<_>>>()?,
|
||||
),
|
||||
})
|
||||
}
|
||||
@@ -7,8 +7,6 @@ pub enum DomainError {
|
||||
AuthenticationError(String),
|
||||
#[error("Database error: `{0}`")]
|
||||
DatabaseError(#[from] sea_orm::DbErr),
|
||||
#[error("Database transaction error: `{0}`")]
|
||||
DatabaseTransactionError(#[from] sea_orm::TransactionError<sea_orm::DbErr>),
|
||||
#[error("Authentication protocol error for `{0}`")]
|
||||
AuthenticationProtocolError(#[from] lldap_auth::opaque::AuthenticationError),
|
||||
#[error("Unknown crypto error: `{0}`")]
|
||||
@@ -23,13 +21,4 @@ pub enum DomainError {
|
||||
InternalError(String),
|
||||
}
|
||||
|
||||
impl From<sea_orm::TransactionError<DomainError>> for DomainError {
|
||||
fn from(value: sea_orm::TransactionError<DomainError>) -> Self {
|
||||
match value {
|
||||
sea_orm::TransactionError::Connection(e) => e.into(),
|
||||
sea_orm::TransactionError::Transaction(e) => e,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub type Result<T> = std::result::Result<T, DomainError>;
|
||||
|
||||
@@ -1,8 +1,7 @@
|
||||
use crate::domain::{
|
||||
use super::{
|
||||
error::Result,
|
||||
types::{
|
||||
AttributeName, AttributeType, AttributeValue, Email, Group, GroupDetails, GroupId,
|
||||
GroupName, JpegPhoto, Serialized, User, UserAndGroups, UserColumn, UserId, Uuid,
|
||||
Group, GroupDetails, GroupId, JpegPhoto, User, UserAndGroups, UserColumn, UserId, Uuid,
|
||||
},
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
@@ -54,10 +53,9 @@ pub enum UserRequestFilter {
|
||||
UserId(UserId),
|
||||
UserIdSubString(SubStringFilter),
|
||||
Equality(UserColumn, String),
|
||||
AttributeEquality(AttributeName, Serialized),
|
||||
SubString(UserColumn, SubStringFilter),
|
||||
// Check if a user belongs to a group identified by name.
|
||||
MemberOf(GroupName),
|
||||
MemberOf(String),
|
||||
// Same, by id.
|
||||
MemberOfId(GroupId),
|
||||
}
|
||||
@@ -77,13 +75,12 @@ pub enum GroupRequestFilter {
|
||||
And(Vec<GroupRequestFilter>),
|
||||
Or(Vec<GroupRequestFilter>),
|
||||
Not(Box<GroupRequestFilter>),
|
||||
DisplayName(GroupName),
|
||||
DisplayName(String),
|
||||
DisplayNameSubString(SubStringFilter),
|
||||
Uuid(Uuid),
|
||||
GroupId(GroupId),
|
||||
// Check if the group contains a user identified by uid.
|
||||
Member(UserId),
|
||||
AttributeEquality(AttributeName, Serialized),
|
||||
}
|
||||
|
||||
impl From<bool> for GroupRequestFilter {
|
||||
@@ -100,81 +97,28 @@ impl From<bool> for GroupRequestFilter {
|
||||
pub struct CreateUserRequest {
|
||||
// Same fields as User, but no creation_date, and with password.
|
||||
pub user_id: UserId,
|
||||
pub email: Email,
|
||||
pub email: String,
|
||||
pub display_name: Option<String>,
|
||||
pub first_name: Option<String>,
|
||||
pub last_name: Option<String>,
|
||||
pub avatar: Option<JpegPhoto>,
|
||||
pub attributes: Vec<AttributeValue>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Default)]
|
||||
pub struct UpdateUserRequest {
|
||||
// Same fields as CreateUserRequest, but no with an extra layer of Option.
|
||||
pub user_id: UserId,
|
||||
pub email: Option<Email>,
|
||||
pub email: Option<String>,
|
||||
pub display_name: Option<String>,
|
||||
pub first_name: Option<String>,
|
||||
pub last_name: Option<String>,
|
||||
pub avatar: Option<JpegPhoto>,
|
||||
pub delete_attributes: Vec<AttributeName>,
|
||||
pub insert_attributes: Vec<AttributeValue>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Default)]
|
||||
pub struct CreateGroupRequest {
|
||||
pub display_name: GroupName,
|
||||
pub attributes: Vec<AttributeValue>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct UpdateGroupRequest {
|
||||
pub group_id: GroupId,
|
||||
pub display_name: Option<GroupName>,
|
||||
pub delete_attributes: Vec<AttributeName>,
|
||||
pub insert_attributes: Vec<AttributeValue>,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct AttributeSchema {
|
||||
pub name: AttributeName,
|
||||
//TODO: pub aliases: Vec<String>,
|
||||
pub attribute_type: AttributeType,
|
||||
pub is_list: bool,
|
||||
pub is_visible: bool,
|
||||
pub is_editable: bool,
|
||||
pub is_hardcoded: bool,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct CreateAttributeRequest {
|
||||
pub name: AttributeName,
|
||||
pub attribute_type: AttributeType,
|
||||
pub is_list: bool,
|
||||
pub is_visible: bool,
|
||||
pub is_editable: bool,
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct AttributeList {
|
||||
pub attributes: Vec<AttributeSchema>,
|
||||
}
|
||||
|
||||
impl AttributeList {
|
||||
pub fn get_attribute_schema(&self, name: &AttributeName) -> Option<&AttributeSchema> {
|
||||
self.attributes.iter().find(|a| a.name == *name)
|
||||
}
|
||||
|
||||
pub fn get_attribute_type(&self, name: &AttributeName) -> Option<(AttributeType, bool)> {
|
||||
self.get_attribute_schema(name)
|
||||
.map(|a| (a.attribute_type, a.is_list))
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)]
|
||||
pub struct Schema {
|
||||
pub user_attributes: AttributeList,
|
||||
pub group_attributes: AttributeList,
|
||||
pub display_name: Option<String>,
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
@@ -183,20 +127,20 @@ pub trait LoginHandler: Send + Sync {
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait GroupListerBackendHandler: ReadSchemaBackendHandler {
|
||||
pub trait GroupListerBackendHandler {
|
||||
async fn list_groups(&self, filters: Option<GroupRequestFilter>) -> Result<Vec<Group>>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait GroupBackendHandler: ReadSchemaBackendHandler {
|
||||
pub trait GroupBackendHandler {
|
||||
async fn get_group_details(&self, group_id: GroupId) -> Result<GroupDetails>;
|
||||
async fn update_group(&self, request: UpdateGroupRequest) -> Result<()>;
|
||||
async fn create_group(&self, request: CreateGroupRequest) -> Result<GroupId>;
|
||||
async fn create_group(&self, group_name: &str) -> Result<GroupId>;
|
||||
async fn delete_group(&self, group_id: GroupId) -> Result<()>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait UserListerBackendHandler: ReadSchemaBackendHandler {
|
||||
pub trait UserListerBackendHandler {
|
||||
async fn list_users(
|
||||
&self,
|
||||
filters: Option<UserRequestFilter>,
|
||||
@@ -205,7 +149,7 @@ pub trait UserListerBackendHandler: ReadSchemaBackendHandler {
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait UserBackendHandler: ReadSchemaBackendHandler {
|
||||
pub trait UserBackendHandler {
|
||||
async fn get_user_details(&self, user_id: &UserId) -> Result<User>;
|
||||
async fn create_user(&self, request: CreateUserRequest) -> Result<()>;
|
||||
async fn update_user(&self, request: UpdateUserRequest) -> Result<()>;
|
||||
@@ -215,20 +159,6 @@ pub trait UserBackendHandler: ReadSchemaBackendHandler {
|
||||
async fn get_user_groups(&self, user_id: &UserId) -> Result<HashSet<GroupDetails>>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait ReadSchemaBackendHandler {
|
||||
async fn get_schema(&self) -> Result<Schema>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait SchemaBackendHandler: ReadSchemaBackendHandler {
|
||||
async fn add_user_attribute(&self, request: CreateAttributeRequest) -> Result<()>;
|
||||
async fn add_group_attribute(&self, request: CreateAttributeRequest) -> Result<()>;
|
||||
// Note: It's up to the caller to make sure that the attribute is not hardcoded.
|
||||
async fn delete_user_attribute(&self, name: &AttributeName) -> Result<()>;
|
||||
async fn delete_group_attribute(&self, name: &AttributeName) -> Result<()>;
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
pub trait BackendHandler:
|
||||
Send
|
||||
@@ -237,17 +167,53 @@ pub trait BackendHandler:
|
||||
+ UserBackendHandler
|
||||
+ UserListerBackendHandler
|
||||
+ GroupListerBackendHandler
|
||||
+ ReadSchemaBackendHandler
|
||||
+ SchemaBackendHandler
|
||||
{
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use base64::Engine;
|
||||
use pretty_assertions::assert_ne;
|
||||
mockall::mock! {
|
||||
pub TestBackendHandler{}
|
||||
impl Clone for TestBackendHandler {
|
||||
fn clone(&self) -> Self;
|
||||
}
|
||||
#[async_trait]
|
||||
impl GroupListerBackendHandler for TestBackendHandler {
|
||||
async fn list_groups(&self, filters: Option<GroupRequestFilter>) -> Result<Vec<Group>>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl GroupBackendHandler for TestBackendHandler {
|
||||
async fn get_group_details(&self, group_id: GroupId) -> Result<GroupDetails>;
|
||||
async fn update_group(&self, request: UpdateGroupRequest) -> Result<()>;
|
||||
async fn create_group(&self, group_name: &str) -> Result<GroupId>;
|
||||
async fn delete_group(&self, group_id: GroupId) -> Result<()>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl UserListerBackendHandler for TestBackendHandler {
|
||||
async fn list_users(&self, filters: Option<UserRequestFilter>, get_groups: bool) -> Result<Vec<UserAndGroups>>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl UserBackendHandler for TestBackendHandler {
|
||||
async fn get_user_details(&self, user_id: &UserId) -> Result<User>;
|
||||
async fn create_user(&self, request: CreateUserRequest) -> Result<()>;
|
||||
async fn update_user(&self, request: UpdateUserRequest) -> Result<()>;
|
||||
async fn delete_user(&self, user_id: &UserId) -> Result<()>;
|
||||
async fn get_user_groups(&self, user_id: &UserId) -> Result<HashSet<GroupDetails>>;
|
||||
async fn add_user_to_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()>;
|
||||
async fn remove_user_from_group(&self, user_id: &UserId, group_id: GroupId) -> Result<()>;
|
||||
}
|
||||
#[async_trait]
|
||||
impl BackendHandler for TestBackendHandler {}
|
||||
#[async_trait]
|
||||
impl LoginHandler for TestBackendHandler {
|
||||
async fn bind(&self, request: BindRequest) -> Result<()>;
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use base64::Engine;
|
||||
|
||||
use super::*;
|
||||
#[test]
|
||||
fn test_uuid_time() {
|
||||
use chrono::prelude::*;
|
||||
|
||||
@@ -1,22 +1,19 @@
|
||||
use chrono::TimeZone;
|
||||
use ldap3_proto::{
|
||||
proto::LdapOp, LdapFilter, LdapPartialAttribute, LdapResultCode, LdapSearchResultEntry,
|
||||
};
|
||||
use tracing::{debug, instrument, warn};
|
||||
|
||||
use crate::domain::{
|
||||
deserialize::deserialize_attribute_value,
|
||||
handler::{GroupListerBackendHandler, GroupRequestFilter},
|
||||
ldap::error::LdapError,
|
||||
schema::{PublicSchema, SchemaGroupAttributeExtractor},
|
||||
types::{AttributeName, AttributeType, Group, UserId, Uuid},
|
||||
types::{Group, GroupColumn, UserId, Uuid},
|
||||
};
|
||||
|
||||
use super::{
|
||||
error::LdapResult,
|
||||
utils::{
|
||||
expand_attribute_wildcards, get_custom_attribute, get_group_id_from_distinguished_name,
|
||||
get_user_id_from_distinguished_name, map_group_field, GroupFieldType, LdapInfo,
|
||||
expand_attribute_wildcards, get_group_id_from_distinguished_name,
|
||||
get_user_id_from_distinguished_name, map_group_field, LdapInfo,
|
||||
},
|
||||
};
|
||||
|
||||
@@ -25,57 +22,40 @@ pub fn get_group_attribute(
|
||||
base_dn_str: &str,
|
||||
attribute: &str,
|
||||
user_filter: &Option<UserId>,
|
||||
ignored_group_attributes: &[AttributeName],
|
||||
schema: &PublicSchema,
|
||||
ignored_group_attributes: &[String],
|
||||
) -> Option<Vec<Vec<u8>>> {
|
||||
let attribute = AttributeName::from(attribute);
|
||||
let attribute_values = match map_group_field(&attribute, schema) {
|
||||
GroupFieldType::ObjectClass => vec![b"groupOfUniqueNames".to_vec()],
|
||||
let attribute = attribute.to_ascii_lowercase();
|
||||
let attribute_values = match attribute.as_str() {
|
||||
"objectclass" => vec![b"groupOfUniqueNames".to_vec()],
|
||||
// Always returned as part of the base response.
|
||||
GroupFieldType::Dn => return None,
|
||||
GroupFieldType::EntryDn => {
|
||||
vec![format!("uid={},ou=groups,{}", group.display_name, base_dn_str).into_bytes()]
|
||||
}
|
||||
GroupFieldType::DisplayName => vec![group.display_name.to_string().into_bytes()],
|
||||
GroupFieldType::CreationDate => vec![chrono::Utc
|
||||
.from_utc_datetime(&group.creation_date)
|
||||
.to_rfc3339()
|
||||
.into_bytes()],
|
||||
GroupFieldType::Member => group
|
||||
"dn" | "distinguishedname" => return None,
|
||||
"cn" | "uid" | "id" => vec![group.display_name.clone().into_bytes()],
|
||||
"entryuuid" | "uuid" => vec![group.uuid.to_string().into_bytes()],
|
||||
"member" | "uniquemember" => group
|
||||
.users
|
||||
.iter()
|
||||
.filter(|u| user_filter.as_ref().map(|f| *u == f).unwrap_or(true))
|
||||
.map(|u| format!("uid={},ou=people,{}", u, base_dn_str).into_bytes())
|
||||
.collect(),
|
||||
GroupFieldType::Uuid => vec![group.uuid.to_string().into_bytes()],
|
||||
GroupFieldType::Attribute(attr, _, _) => {
|
||||
get_custom_attribute::<SchemaGroupAttributeExtractor>(&group.attributes, &attr, schema)?
|
||||
"1.1" => return None,
|
||||
// We ignore the operational attribute wildcard
|
||||
"+" => return None,
|
||||
"*" => {
|
||||
panic!(
|
||||
"Matched {}, * should have been expanded into attribute list and * removed",
|
||||
attribute
|
||||
)
|
||||
}
|
||||
GroupFieldType::NoMatch => match attribute.as_str() {
|
||||
"1.1" => return None,
|
||||
// We ignore the operational attribute wildcard
|
||||
"+" => return None,
|
||||
"*" => {
|
||||
panic!(
|
||||
"Matched {}, * should have been expanded into attribute list and * removed",
|
||||
_ => {
|
||||
if !ignored_group_attributes.contains(&attribute) {
|
||||
warn!(
|
||||
r#"Ignoring unrecognized group attribute: {}\n\
|
||||
To disable this warning, add it to "ignored_group_attributes" in the config."#,
|
||||
attribute
|
||||
)
|
||||
);
|
||||
}
|
||||
_ => {
|
||||
if ignored_group_attributes.contains(&attribute) {
|
||||
return None;
|
||||
}
|
||||
get_custom_attribute::<SchemaGroupAttributeExtractor>(
|
||||
&group.attributes,
|
||||
&attribute,
|
||||
schema,
|
||||
).or_else(||{warn!(
|
||||
r#"Ignoring unrecognized group attribute: {}\n\
|
||||
To disable this warning, add it to "ignored_group_attributes" in the config."#,
|
||||
attribute
|
||||
);None})?
|
||||
}
|
||||
},
|
||||
return None;
|
||||
}
|
||||
};
|
||||
if attribute_values.len() == 1 && attribute_values[0].is_empty() {
|
||||
None
|
||||
@@ -102,8 +82,7 @@ fn make_ldap_search_group_result_entry(
|
||||
base_dn_str: &str,
|
||||
attributes: &[String],
|
||||
user_filter: &Option<UserId>,
|
||||
ignored_group_attributes: &[AttributeName],
|
||||
schema: &PublicSchema,
|
||||
ignored_group_attributes: &[String],
|
||||
) -> LdapSearchResultEntry {
|
||||
let expanded_attributes = expand_group_attribute_wildcards(attributes);
|
||||
|
||||
@@ -118,7 +97,6 @@ fn make_ldap_search_group_result_entry(
|
||||
a,
|
||||
user_filter,
|
||||
ignored_group_attributes,
|
||||
schema,
|
||||
)?;
|
||||
Some(LdapPartialAttribute {
|
||||
atype: a.to_string(),
|
||||
@@ -129,79 +107,59 @@ fn make_ldap_search_group_result_entry(
|
||||
}
|
||||
}
|
||||
|
||||
fn get_group_attribute_equality_filter(
|
||||
field: &AttributeName,
|
||||
typ: AttributeType,
|
||||
is_list: bool,
|
||||
value: &str,
|
||||
) -> LdapResult<GroupRequestFilter> {
|
||||
deserialize_attribute_value(&[value.to_owned()], typ, is_list)
|
||||
.map_err(|e| LdapError {
|
||||
code: LdapResultCode::Other,
|
||||
message: format!("Invalid value for attribute {}: {}", field, e),
|
||||
})
|
||||
.map(|v| GroupRequestFilter::AttributeEquality(field.clone(), v))
|
||||
}
|
||||
|
||||
fn convert_group_filter(
|
||||
ldap_info: &LdapInfo,
|
||||
filter: &LdapFilter,
|
||||
schema: &PublicSchema,
|
||||
) -> LdapResult<GroupRequestFilter> {
|
||||
let rec = |f| convert_group_filter(ldap_info, f, schema);
|
||||
let rec = |f| convert_group_filter(ldap_info, f);
|
||||
match filter {
|
||||
LdapFilter::Equality(field, value) => {
|
||||
let field = AttributeName::from(field.as_str());
|
||||
let value = value.to_ascii_lowercase();
|
||||
match map_group_field(&field, schema) {
|
||||
GroupFieldType::DisplayName => Ok(GroupRequestFilter::DisplayName(value.into())),
|
||||
GroupFieldType::Uuid => Ok(GroupRequestFilter::Uuid(
|
||||
Uuid::try_from(value.as_str()).map_err(|e| LdapError {
|
||||
code: LdapResultCode::InappropriateMatching,
|
||||
message: format!("Invalid UUID: {:#}", e),
|
||||
})?,
|
||||
)),
|
||||
GroupFieldType::Member => {
|
||||
let field = &field.to_ascii_lowercase();
|
||||
let value = &value.to_ascii_lowercase();
|
||||
match field.as_str() {
|
||||
"member" | "uniquemember" => {
|
||||
let user_name = get_user_id_from_distinguished_name(
|
||||
&value,
|
||||
value,
|
||||
&ldap_info.base_dn,
|
||||
&ldap_info.base_dn_str,
|
||||
)?;
|
||||
Ok(GroupRequestFilter::Member(user_name))
|
||||
}
|
||||
GroupFieldType::ObjectClass => Ok(GroupRequestFilter::from(matches!(
|
||||
"objectclass" => Ok(GroupRequestFilter::from(matches!(
|
||||
value.as_str(),
|
||||
"groupofuniquenames" | "groupofnames"
|
||||
))),
|
||||
GroupFieldType::Dn | GroupFieldType::EntryDn => {
|
||||
Ok(get_group_id_from_distinguished_name(
|
||||
value.as_str(),
|
||||
&ldap_info.base_dn,
|
||||
&ldap_info.base_dn_str,
|
||||
)
|
||||
.map(GroupRequestFilter::DisplayName)
|
||||
.unwrap_or_else(|_| {
|
||||
warn!("Invalid dn filter on group: {}", value);
|
||||
GroupRequestFilter::from(false)
|
||||
}))
|
||||
}
|
||||
GroupFieldType::NoMatch => {
|
||||
if !ldap_info.ignored_group_attributes.contains(&field) {
|
||||
warn!(
|
||||
r#"Ignoring unknown group attribute "{}" in filter.\n\
|
||||
To disable this warning, add it to "ignored_group_attributes" in the config."#,
|
||||
field
|
||||
);
|
||||
"dn" => Ok(get_group_id_from_distinguished_name(
|
||||
value.to_ascii_lowercase().as_str(),
|
||||
&ldap_info.base_dn,
|
||||
&ldap_info.base_dn_str,
|
||||
)
|
||||
.map(GroupRequestFilter::DisplayName)
|
||||
.unwrap_or_else(|_| {
|
||||
warn!("Invalid dn filter on group: {}", value);
|
||||
GroupRequestFilter::from(false)
|
||||
})),
|
||||
_ => match map_group_field(field) {
|
||||
Some(GroupColumn::DisplayName) => {
|
||||
Ok(GroupRequestFilter::DisplayName(value.to_string()))
|
||||
}
|
||||
Ok(GroupRequestFilter::from(false))
|
||||
}
|
||||
GroupFieldType::Attribute(field, typ, is_list) => {
|
||||
get_group_attribute_equality_filter(&field, typ, is_list, &value)
|
||||
}
|
||||
GroupFieldType::CreationDate => Err(LdapError {
|
||||
code: LdapResultCode::UnwillingToPerform,
|
||||
message: "Creation date filter for groups not supported".to_owned(),
|
||||
}),
|
||||
Some(GroupColumn::Uuid) => Ok(GroupRequestFilter::Uuid(
|
||||
Uuid::try_from(value.as_str()).map_err(|e| LdapError {
|
||||
code: LdapResultCode::InappropriateMatching,
|
||||
message: format!("Invalid UUID: {:#}", e),
|
||||
})?,
|
||||
)),
|
||||
_ => {
|
||||
if !ldap_info.ignored_group_attributes.contains(field) {
|
||||
warn!(
|
||||
r#"Ignoring unknown group attribute "{:?}" in filter.\n\
|
||||
To disable this warning, add it to "ignored_group_attributes" in the config."#,
|
||||
field
|
||||
);
|
||||
}
|
||||
Ok(GroupRequestFilter::from(false))
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
LdapFilter::And(filters) => Ok(GroupRequestFilter::And(
|
||||
@@ -212,23 +170,24 @@ fn convert_group_filter(
|
||||
)),
|
||||
LdapFilter::Not(filter) => Ok(GroupRequestFilter::Not(Box::new(rec(filter)?))),
|
||||
LdapFilter::Present(field) => {
|
||||
let field = AttributeName::from(field.as_str());
|
||||
Ok(GroupRequestFilter::from(!matches!(
|
||||
map_group_field(&field, schema),
|
||||
GroupFieldType::NoMatch
|
||||
)))
|
||||
let field = &field.to_ascii_lowercase();
|
||||
Ok(GroupRequestFilter::from(
|
||||
field == "objectclass"
|
||||
|| field == "dn"
|
||||
|| field == "distinguishedname"
|
||||
|| map_group_field(field).is_some(),
|
||||
))
|
||||
}
|
||||
LdapFilter::Substring(field, substring_filter) => {
|
||||
let field = AttributeName::from(field.as_str());
|
||||
match map_group_field(&field, schema) {
|
||||
GroupFieldType::DisplayName => Ok(GroupRequestFilter::DisplayNameSubString(
|
||||
let field = &field.to_ascii_lowercase();
|
||||
match map_group_field(field.as_str()) {
|
||||
Some(GroupColumn::DisplayName) => Ok(GroupRequestFilter::DisplayNameSubString(
|
||||
substring_filter.clone().into(),
|
||||
)),
|
||||
GroupFieldType::NoMatch => Ok(GroupRequestFilter::from(false)),
|
||||
_ => Err(LdapError {
|
||||
code: LdapResultCode::UnwillingToPerform,
|
||||
message: format!(
|
||||
"Unsupported group attribute for substring filter: \"{}\"",
|
||||
"Unsupported group attribute for substring filter: {:?}",
|
||||
field
|
||||
),
|
||||
}),
|
||||
@@ -241,15 +200,15 @@ fn convert_group_filter(
|
||||
}
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", fields(ldap_filter))]
|
||||
#[instrument(skip_all, level = "debug")]
|
||||
pub async fn get_groups_list<Backend: GroupListerBackendHandler>(
|
||||
ldap_info: &LdapInfo,
|
||||
ldap_filter: &LdapFilter,
|
||||
base: &str,
|
||||
backend: &Backend,
|
||||
schema: &PublicSchema,
|
||||
) -> LdapResult<Vec<Group>> {
|
||||
let filters = convert_group_filter(ldap_info, ldap_filter, schema)?;
|
||||
debug!(?ldap_filter);
|
||||
let filters = convert_group_filter(ldap_info, ldap_filter)?;
|
||||
debug!(?filters);
|
||||
backend
|
||||
.list_groups(Some(filters))
|
||||
@@ -265,7 +224,6 @@ pub fn convert_groups_to_ldap_op<'a>(
|
||||
attributes: &'a [String],
|
||||
ldap_info: &'a LdapInfo,
|
||||
user_filter: &'a Option<UserId>,
|
||||
schema: &'a PublicSchema,
|
||||
) -> impl Iterator<Item = LdapOp> + 'a {
|
||||
groups.into_iter().map(move |g| {
|
||||
LdapOp::SearchResultEntry(make_ldap_search_group_result_entry(
|
||||
@@ -274,7 +232,6 @@ pub fn convert_groups_to_ldap_op<'a>(
|
||||
attributes,
|
||||
user_filter,
|
||||
&ldap_info.ignored_group_attributes,
|
||||
schema,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -5,17 +5,17 @@ use ldap3_proto::{
|
||||
use tracing::{debug, instrument, warn};
|
||||
|
||||
use crate::domain::{
|
||||
deserialize::deserialize_attribute_value,
|
||||
handler::{UserListerBackendHandler, UserRequestFilter},
|
||||
ldap::{
|
||||
error::{LdapError, LdapResult},
|
||||
utils::{
|
||||
expand_attribute_wildcards, get_custom_attribute, get_group_id_from_distinguished_name,
|
||||
get_user_id_from_distinguished_name, map_user_field, LdapInfo, UserFieldType,
|
||||
},
|
||||
error::LdapError,
|
||||
utils::{expand_attribute_wildcards, get_user_id_from_distinguished_name},
|
||||
},
|
||||
schema::{PublicSchema, SchemaUserAttributeExtractor},
|
||||
types::{AttributeName, AttributeType, GroupDetails, User, UserAndGroups, UserColumn, UserId},
|
||||
types::{GroupDetails, User, UserAndGroups, UserColumn, UserId},
|
||||
};
|
||||
|
||||
use super::{
|
||||
error::LdapResult,
|
||||
utils::{get_group_id_from_distinguished_name, map_user_field, LdapInfo},
|
||||
};
|
||||
|
||||
pub fn get_user_attribute(
|
||||
@@ -23,79 +23,57 @@ pub fn get_user_attribute(
|
||||
attribute: &str,
|
||||
base_dn_str: &str,
|
||||
groups: Option<&[GroupDetails]>,
|
||||
ignored_user_attributes: &[AttributeName],
|
||||
schema: &PublicSchema,
|
||||
ignored_user_attributes: &[String],
|
||||
) -> Option<Vec<Vec<u8>>> {
|
||||
let attribute = AttributeName::from(attribute);
|
||||
let attribute_values = match map_user_field(&attribute, schema) {
|
||||
UserFieldType::ObjectClass => vec![
|
||||
let attribute = attribute.to_ascii_lowercase();
|
||||
let attribute_values = match attribute.as_str() {
|
||||
"objectclass" => vec![
|
||||
b"inetOrgPerson".to_vec(),
|
||||
b"posixAccount".to_vec(),
|
||||
b"mailAccount".to_vec(),
|
||||
b"person".to_vec(),
|
||||
],
|
||||
// dn is always returned as part of the base response.
|
||||
UserFieldType::Dn => return None,
|
||||
UserFieldType::EntryDn => {
|
||||
vec![format!("uid={},ou=people,{}", &user.user_id, base_dn_str).into_bytes()]
|
||||
}
|
||||
UserFieldType::MemberOf => groups
|
||||
"dn" | "distinguishedname" => return None,
|
||||
"uid" | "user_id" | "id" => vec![user.user_id.to_string().into_bytes()],
|
||||
"entryuuid" | "uuid" => vec![user.uuid.to_string().into_bytes()],
|
||||
"mail" | "email" => vec![user.email.clone().into_bytes()],
|
||||
"givenname" | "first_name" | "firstname" => vec![user.first_name.clone()?.into_bytes()],
|
||||
"sn" | "last_name" | "lastname" => vec![user.last_name.clone()?.into_bytes()],
|
||||
"jpegphoto" | "avatar" => vec![user.avatar.clone()?.into_bytes()],
|
||||
"memberof" => groups
|
||||
.into_iter()
|
||||
.flatten()
|
||||
.map(|id_and_name| {
|
||||
format!("cn={},ou=groups,{}", &id_and_name.display_name, base_dn_str).into_bytes()
|
||||
})
|
||||
.collect(),
|
||||
UserFieldType::PrimaryField(UserColumn::UserId) => {
|
||||
vec![user.user_id.to_string().into_bytes()]
|
||||
"cn" | "displayname" => vec![user.display_name.clone()?.into_bytes()],
|
||||
"creationdate" | "creation_date" | "createtimestamp" | "modifytimestamp" => {
|
||||
vec![chrono::Utc
|
||||
.from_utc_datetime(&user.creation_date)
|
||||
.to_rfc3339()
|
||||
.into_bytes()]
|
||||
}
|
||||
UserFieldType::PrimaryField(UserColumn::Email) => vec![user.email.to_string().into_bytes()],
|
||||
UserFieldType::PrimaryField(
|
||||
UserColumn::LowercaseEmail
|
||||
| UserColumn::PasswordHash
|
||||
| UserColumn::TotpSecret
|
||||
| UserColumn::MfaType,
|
||||
) => panic!("Should not get here"),
|
||||
UserFieldType::PrimaryField(UserColumn::Uuid) => vec![user.uuid.to_string().into_bytes()],
|
||||
UserFieldType::PrimaryField(UserColumn::DisplayName) => {
|
||||
vec![user.display_name.clone()?.into_bytes()]
|
||||
"1.1" => return None,
|
||||
// We ignore the operational attribute wildcard.
|
||||
"+" => return None,
|
||||
"*" => {
|
||||
panic!(
|
||||
"Matched {}, * should have been expanded into attribute list and * removed",
|
||||
attribute
|
||||
)
|
||||
}
|
||||
UserFieldType::PrimaryField(UserColumn::CreationDate) => vec![chrono::Utc
|
||||
.from_utc_datetime(&user.creation_date)
|
||||
.to_rfc3339()
|
||||
.into_bytes()],
|
||||
UserFieldType::Attribute(attr, _, _) => {
|
||||
get_custom_attribute::<SchemaUserAttributeExtractor>(&user.attributes, &attr, schema)?
|
||||
}
|
||||
UserFieldType::NoMatch => match attribute.as_str() {
|
||||
"1.1" => return None,
|
||||
// We ignore the operational attribute wildcard.
|
||||
"+" => return None,
|
||||
"*" => {
|
||||
panic!(
|
||||
"Matched {}, * should have been expanded into attribute list and * removed",
|
||||
attribute
|
||||
)
|
||||
}
|
||||
_ => {
|
||||
if ignored_user_attributes.contains(&attribute) {
|
||||
return None;
|
||||
}
|
||||
get_custom_attribute::<SchemaUserAttributeExtractor>(
|
||||
&user.attributes,
|
||||
&attribute,
|
||||
schema,
|
||||
)
|
||||
.or_else(|| {
|
||||
warn!(
|
||||
r#"Ignoring unrecognized group attribute: {}\n\
|
||||
_ => {
|
||||
if !ignored_user_attributes.contains(&attribute) {
|
||||
warn!(
|
||||
r#"Ignoring unrecognized group attribute: {}\n\
|
||||
To disable this warning, add it to "ignored_user_attributes" in the config."#,
|
||||
attribute
|
||||
);
|
||||
None
|
||||
})?
|
||||
attribute
|
||||
);
|
||||
}
|
||||
},
|
||||
return None;
|
||||
}
|
||||
};
|
||||
if attribute_values.len() == 1 && attribute_values[0].is_empty() {
|
||||
None
|
||||
@@ -121,8 +99,7 @@ fn make_ldap_search_user_result_entry(
|
||||
base_dn_str: &str,
|
||||
attributes: &[String],
|
||||
groups: Option<&[GroupDetails]>,
|
||||
ignored_user_attributes: &[AttributeName],
|
||||
schema: &PublicSchema,
|
||||
ignored_user_attributes: &[String],
|
||||
) -> LdapSearchResultEntry {
|
||||
let expanded_attributes = expand_user_attribute_wildcards(attributes);
|
||||
let dn = format!("uid={},ou=people,{}", user.user_id.as_str(), base_dn_str);
|
||||
@@ -131,14 +108,8 @@ fn make_ldap_search_user_result_entry(
|
||||
attributes: expanded_attributes
|
||||
.iter()
|
||||
.filter_map(|a| {
|
||||
let values = get_user_attribute(
|
||||
&user,
|
||||
a,
|
||||
base_dn_str,
|
||||
groups,
|
||||
ignored_user_attributes,
|
||||
schema,
|
||||
)?;
|
||||
let values =
|
||||
get_user_attribute(&user, a, base_dn_str, groups, ignored_user_attributes)?;
|
||||
Some(LdapPartialAttribute {
|
||||
atype: a.to_string(),
|
||||
vals: values,
|
||||
@@ -148,26 +119,8 @@ fn make_ldap_search_user_result_entry(
|
||||
}
|
||||
}
|
||||
|
||||
fn get_user_attribute_equality_filter(
|
||||
field: &AttributeName,
|
||||
typ: AttributeType,
|
||||
is_list: bool,
|
||||
value: &str,
|
||||
) -> LdapResult<UserRequestFilter> {
|
||||
deserialize_attribute_value(&[value.to_owned()], typ, is_list)
|
||||
.map_err(|e| LdapError {
|
||||
code: LdapResultCode::Other,
|
||||
message: format!("Invalid value for attribute {}: {}", field, e),
|
||||
})
|
||||
.map(|v| UserRequestFilter::AttributeEquality(field.clone(), v))
|
||||
}
|
||||
|
||||
fn convert_user_filter(
|
||||
ldap_info: &LdapInfo,
|
||||
filter: &LdapFilter,
|
||||
schema: &PublicSchema,
|
||||
) -> LdapResult<UserRequestFilter> {
|
||||
let rec = |f| convert_user_filter(ldap_info, f, schema);
|
||||
fn convert_user_filter(ldap_info: &LdapInfo, filter: &LdapFilter) -> LdapResult<UserRequestFilter> {
|
||||
let rec = |f| convert_user_filter(ldap_info, f);
|
||||
match filter {
|
||||
LdapFilter::And(filters) => Ok(UserRequestFilter::And(
|
||||
filters.iter().map(rec).collect::<LdapResult<_>>()?,
|
||||
@@ -177,82 +130,72 @@ fn convert_user_filter(
|
||||
)),
|
||||
LdapFilter::Not(filter) => Ok(UserRequestFilter::Not(Box::new(rec(filter)?))),
|
||||
LdapFilter::Equality(field, value) => {
|
||||
let field = AttributeName::from(field.as_str());
|
||||
let value = value.to_ascii_lowercase();
|
||||
match map_user_field(&field, schema) {
|
||||
UserFieldType::PrimaryField(UserColumn::UserId) => {
|
||||
Ok(UserRequestFilter::UserId(UserId::new(&value)))
|
||||
}
|
||||
UserFieldType::PrimaryField(field) => Ok(UserRequestFilter::Equality(field, value)),
|
||||
UserFieldType::Attribute(field, typ, is_list) => {
|
||||
get_user_attribute_equality_filter(&field, typ, is_list, &value)
|
||||
}
|
||||
UserFieldType::NoMatch => {
|
||||
if !ldap_info.ignored_user_attributes.contains(&field) {
|
||||
warn!(
|
||||
r#"Ignoring unknown user attribute "{}" in filter.\n\
|
||||
To disable this warning, add it to "ignored_user_attributes" in the config"#,
|
||||
field
|
||||
);
|
||||
}
|
||||
Ok(UserRequestFilter::from(false))
|
||||
}
|
||||
UserFieldType::ObjectClass => Ok(UserRequestFilter::from(matches!(
|
||||
value.as_str(),
|
||||
"person" | "inetorgperson" | "posixaccount" | "mailaccount"
|
||||
))),
|
||||
UserFieldType::MemberOf => Ok(UserRequestFilter::MemberOf(
|
||||
let field = &field.to_ascii_lowercase();
|
||||
match field.as_str() {
|
||||
"memberof" => Ok(UserRequestFilter::MemberOf(
|
||||
get_group_id_from_distinguished_name(
|
||||
&value,
|
||||
&value.to_ascii_lowercase(),
|
||||
&ldap_info.base_dn,
|
||||
&ldap_info.base_dn_str,
|
||||
)?,
|
||||
)),
|
||||
UserFieldType::EntryDn | UserFieldType::Dn => {
|
||||
Ok(get_user_id_from_distinguished_name(
|
||||
value.as_str(),
|
||||
&ldap_info.base_dn,
|
||||
&ldap_info.base_dn_str,
|
||||
)
|
||||
.map(UserRequestFilter::UserId)
|
||||
.unwrap_or_else(|_| {
|
||||
warn!("Invalid dn filter on user: {}", value);
|
||||
UserRequestFilter::from(false)
|
||||
}))
|
||||
}
|
||||
"objectclass" => Ok(UserRequestFilter::from(matches!(
|
||||
value.to_ascii_lowercase().as_str(),
|
||||
"person" | "inetorgperson" | "posixaccount" | "mailaccount"
|
||||
))),
|
||||
"dn" => Ok(get_user_id_from_distinguished_name(
|
||||
value.to_ascii_lowercase().as_str(),
|
||||
&ldap_info.base_dn,
|
||||
&ldap_info.base_dn_str,
|
||||
)
|
||||
.map(UserRequestFilter::UserId)
|
||||
.unwrap_or_else(|_| {
|
||||
warn!("Invalid dn filter on user: {}", value);
|
||||
UserRequestFilter::from(false)
|
||||
})),
|
||||
_ => match map_user_field(field) {
|
||||
Some(UserColumn::UserId) => Ok(UserRequestFilter::UserId(UserId::new(value))),
|
||||
Some(field) => Ok(UserRequestFilter::Equality(field, value.clone())),
|
||||
None => {
|
||||
if !ldap_info.ignored_user_attributes.contains(field) {
|
||||
warn!(
|
||||
r#"Ignoring unknown user attribute "{}" in filter.\n\
|
||||
To disable this warning, add it to "ignored_user_attributes" in the config"#,
|
||||
field
|
||||
);
|
||||
}
|
||||
Ok(UserRequestFilter::from(false))
|
||||
}
|
||||
},
|
||||
}
|
||||
}
|
||||
LdapFilter::Present(field) => {
|
||||
let field = AttributeName::from(field.as_str());
|
||||
let field = &field.to_ascii_lowercase();
|
||||
// Check that it's a field we support.
|
||||
Ok(UserRequestFilter::from(
|
||||
field.as_str() == "objectclass"
|
||||
|| field.as_str() == "dn"
|
||||
|| field.as_str() == "distinguishedname"
|
||||
|| !matches!(map_user_field(&field, schema), UserFieldType::NoMatch),
|
||||
field == "objectclass"
|
||||
|| field == "dn"
|
||||
|| field == "distinguishedname"
|
||||
|| map_user_field(field).is_some(),
|
||||
))
|
||||
}
|
||||
LdapFilter::Substring(field, substring_filter) => {
|
||||
let field = AttributeName::from(field.as_str());
|
||||
match map_user_field(&field, schema) {
|
||||
UserFieldType::PrimaryField(UserColumn::UserId) => Ok(
|
||||
UserRequestFilter::UserIdSubString(substring_filter.clone().into()),
|
||||
),
|
||||
UserFieldType::Attribute(_, _, _)
|
||||
| UserFieldType::ObjectClass
|
||||
| UserFieldType::MemberOf
|
||||
| UserFieldType::Dn
|
||||
| UserFieldType::EntryDn
|
||||
| UserFieldType::PrimaryField(UserColumn::CreationDate)
|
||||
| UserFieldType::PrimaryField(UserColumn::Uuid) => Err(LdapError {
|
||||
let field = &field.to_ascii_lowercase();
|
||||
match map_user_field(field.as_str()) {
|
||||
Some(UserColumn::UserId) => Ok(UserRequestFilter::UserIdSubString(
|
||||
substring_filter.clone().into(),
|
||||
)),
|
||||
None
|
||||
| Some(UserColumn::CreationDate)
|
||||
| Some(UserColumn::Avatar)
|
||||
| Some(UserColumn::Uuid) => Err(LdapError {
|
||||
code: LdapResultCode::UnwillingToPerform,
|
||||
message: format!(
|
||||
"Unsupported user attribute for substring filter: {:?}",
|
||||
field
|
||||
),
|
||||
}),
|
||||
UserFieldType::NoMatch => Ok(UserRequestFilter::from(false)),
|
||||
UserFieldType::PrimaryField(field) => Ok(UserRequestFilter::SubString(
|
||||
Some(field) => Ok(UserRequestFilter::SubString(
|
||||
field,
|
||||
substring_filter.clone().into(),
|
||||
)),
|
||||
@@ -269,16 +212,16 @@ fn expand_user_attribute_wildcards(attributes: &[String]) -> Vec<&str> {
|
||||
expand_attribute_wildcards(attributes, ALL_USER_ATTRIBUTE_KEYS)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", fields(ldap_filter, request_groups))]
|
||||
#[instrument(skip_all, level = "debug")]
|
||||
pub async fn get_user_list<Backend: UserListerBackendHandler>(
|
||||
ldap_info: &LdapInfo,
|
||||
ldap_filter: &LdapFilter,
|
||||
request_groups: bool,
|
||||
base: &str,
|
||||
backend: &Backend,
|
||||
schema: &PublicSchema,
|
||||
) -> LdapResult<Vec<UserAndGroups>> {
|
||||
let filters = convert_user_filter(ldap_info, ldap_filter, schema)?;
|
||||
debug!(?ldap_filter);
|
||||
let filters = convert_user_filter(ldap_info, ldap_filter)?;
|
||||
debug!(?filters);
|
||||
backend
|
||||
.list_users(Some(filters), request_groups)
|
||||
@@ -293,7 +236,6 @@ pub fn convert_users_to_ldap_op<'a>(
|
||||
users: Vec<UserAndGroups>,
|
||||
attributes: &'a [String],
|
||||
ldap_info: &'a LdapInfo,
|
||||
schema: &'a PublicSchema,
|
||||
) -> impl Iterator<Item = LdapOp> + 'a {
|
||||
users.into_iter().map(move |u| {
|
||||
LdapOp::SearchResultEntry(make_ldap_search_user_result_entry(
|
||||
@@ -302,7 +244,6 @@ pub fn convert_users_to_ldap_op<'a>(
|
||||
attributes,
|
||||
u.groups.as_deref(),
|
||||
&ldap_info.ignored_user_attributes,
|
||||
schema,
|
||||
))
|
||||
})
|
||||
}
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
use chrono::{NaiveDateTime, TimeZone};
|
||||
use itertools::Itertools;
|
||||
use ldap3_proto::{proto::LdapSubstringFilter, LdapResultCode};
|
||||
use tracing::{debug, instrument, warn};
|
||||
@@ -6,10 +5,7 @@ use tracing::{debug, instrument, warn};
|
||||
use crate::domain::{
|
||||
handler::SubStringFilter,
|
||||
ldap::error::{LdapError, LdapResult},
|
||||
schema::{PublicSchema, SchemaAttributeExtractor},
|
||||
types::{
|
||||
AttributeName, AttributeType, AttributeValue, GroupName, JpegPhoto, UserColumn, UserId,
|
||||
},
|
||||
types::{GroupColumn, UserColumn, UserId},
|
||||
};
|
||||
|
||||
impl From<LdapSubstringFilter> for SubStringFilter {
|
||||
@@ -105,11 +101,11 @@ pub fn get_group_id_from_distinguished_name(
|
||||
dn: &str,
|
||||
base_tree: &[(String, String)],
|
||||
base_dn_str: &str,
|
||||
) -> LdapResult<GroupName> {
|
||||
get_id_from_distinguished_name(dn, base_tree, base_dn_str, true).map(GroupName::from)
|
||||
) -> LdapResult<String> {
|
||||
get_id_from_distinguished_name(dn, base_tree, base_dn_str, true)
|
||||
}
|
||||
|
||||
#[instrument(skip(all_attribute_keys), level = "debug")]
|
||||
#[instrument(skip_all, level = "debug")]
|
||||
pub fn expand_attribute_wildcards<'a>(
|
||||
ldap_attributes: &'a [String],
|
||||
all_attribute_keys: &'a [&'static str],
|
||||
@@ -131,7 +127,7 @@ pub fn expand_attribute_wildcards<'a>(
|
||||
.into_iter()
|
||||
.unique_by(|a| a.to_ascii_lowercase())
|
||||
.collect_vec();
|
||||
debug!(?resolved_attributes);
|
||||
debug!(?ldap_attributes, ?resolved_attributes);
|
||||
resolved_attributes
|
||||
}
|
||||
|
||||
@@ -156,151 +152,38 @@ pub fn is_subtree(subtree: &[(String, String)], base_tree: &[(String, String)])
|
||||
true
|
||||
}
|
||||
|
||||
pub enum UserFieldType {
|
||||
NoMatch,
|
||||
ObjectClass,
|
||||
MemberOf,
|
||||
Dn,
|
||||
EntryDn,
|
||||
PrimaryField(UserColumn),
|
||||
Attribute(AttributeName, AttributeType, bool),
|
||||
}
|
||||
|
||||
pub fn map_user_field(field: &AttributeName, schema: &PublicSchema) -> UserFieldType {
|
||||
match field.as_str() {
|
||||
"memberof" | "ismemberof" => UserFieldType::MemberOf,
|
||||
"objectclass" => UserFieldType::ObjectClass,
|
||||
"dn" | "distinguishedname" => UserFieldType::Dn,
|
||||
"entrydn" => UserFieldType::EntryDn,
|
||||
"uid" | "user_id" | "id" => UserFieldType::PrimaryField(UserColumn::UserId),
|
||||
"mail" | "email" => UserFieldType::PrimaryField(UserColumn::Email),
|
||||
"cn" | "displayname" | "display_name" => {
|
||||
UserFieldType::PrimaryField(UserColumn::DisplayName)
|
||||
}
|
||||
"givenname" | "first_name" | "firstname" => UserFieldType::Attribute(
|
||||
AttributeName::from("first_name"),
|
||||
AttributeType::String,
|
||||
false,
|
||||
),
|
||||
"sn" | "last_name" | "lastname" => UserFieldType::Attribute(
|
||||
AttributeName::from("last_name"),
|
||||
AttributeType::String,
|
||||
false,
|
||||
),
|
||||
"avatar" | "jpegphoto" => UserFieldType::Attribute(
|
||||
AttributeName::from("avatar"),
|
||||
AttributeType::JpegPhoto,
|
||||
false,
|
||||
),
|
||||
pub fn map_user_field(field: &str) -> Option<UserColumn> {
|
||||
assert!(field == field.to_ascii_lowercase());
|
||||
Some(match field {
|
||||
"uid" | "user_id" | "id" => UserColumn::UserId,
|
||||
"mail" | "email" => UserColumn::Email,
|
||||
"cn" | "displayname" | "display_name" => UserColumn::DisplayName,
|
||||
"givenname" | "first_name" | "firstname" => UserColumn::FirstName,
|
||||
"sn" | "last_name" | "lastname" => UserColumn::LastName,
|
||||
"avatar" | "jpegphoto" => UserColumn::Avatar,
|
||||
"creationdate" | "createtimestamp" | "modifytimestamp" | "creation_date" => {
|
||||
UserFieldType::PrimaryField(UserColumn::CreationDate)
|
||||
UserColumn::CreationDate
|
||||
}
|
||||
"entryuuid" | "uuid" => UserFieldType::PrimaryField(UserColumn::Uuid),
|
||||
_ => schema
|
||||
.get_schema()
|
||||
.user_attributes
|
||||
.get_attribute_type(field)
|
||||
.map(|(t, is_list)| UserFieldType::Attribute(field.clone(), t, is_list))
|
||||
.unwrap_or(UserFieldType::NoMatch),
|
||||
}
|
||||
"entryuuid" | "uuid" => UserColumn::Uuid,
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
|
||||
pub enum GroupFieldType {
|
||||
NoMatch,
|
||||
DisplayName,
|
||||
CreationDate,
|
||||
ObjectClass,
|
||||
Dn,
|
||||
// Like Dn, but returned as part of the attributes.
|
||||
EntryDn,
|
||||
Member,
|
||||
Uuid,
|
||||
Attribute(AttributeName, AttributeType, bool),
|
||||
}
|
||||
|
||||
pub fn map_group_field(field: &AttributeName, schema: &PublicSchema) -> GroupFieldType {
|
||||
match field.as_str() {
|
||||
"dn" | "distinguishedname" => GroupFieldType::Dn,
|
||||
"entrydn" => GroupFieldType::EntryDn,
|
||||
"objectclass" => GroupFieldType::ObjectClass,
|
||||
"cn" | "displayname" | "uid" | "display_name" | "id" => GroupFieldType::DisplayName,
|
||||
pub fn map_group_field(field: &str) -> Option<GroupColumn> {
|
||||
assert!(field == field.to_ascii_lowercase());
|
||||
Some(match field {
|
||||
"cn" | "displayname" | "uid" | "display_name" => GroupColumn::DisplayName,
|
||||
"creationdate" | "createtimestamp" | "modifytimestamp" | "creation_date" => {
|
||||
GroupFieldType::CreationDate
|
||||
GroupColumn::CreationDate
|
||||
}
|
||||
"member" | "uniquemember" => GroupFieldType::Member,
|
||||
"entryuuid" | "uuid" => GroupFieldType::Uuid,
|
||||
_ => schema
|
||||
.get_schema()
|
||||
.group_attributes
|
||||
.get_attribute_type(field)
|
||||
.map(|(t, is_list)| GroupFieldType::Attribute(field.clone(), t, is_list))
|
||||
.unwrap_or(GroupFieldType::NoMatch),
|
||||
}
|
||||
"entryuuid" | "uuid" => GroupColumn::Uuid,
|
||||
_ => return None,
|
||||
})
|
||||
}
|
||||
|
||||
pub struct LdapInfo {
|
||||
pub base_dn: Vec<(String, String)>,
|
||||
pub base_dn_str: String,
|
||||
pub ignored_user_attributes: Vec<AttributeName>,
|
||||
pub ignored_group_attributes: Vec<AttributeName>,
|
||||
}
|
||||
|
||||
pub fn get_custom_attribute<Extractor: SchemaAttributeExtractor>(
|
||||
attributes: &[AttributeValue],
|
||||
attribute_name: &AttributeName,
|
||||
schema: &PublicSchema,
|
||||
) -> Option<Vec<Vec<u8>>> {
|
||||
let convert_date = |date| {
|
||||
chrono::Utc
|
||||
.from_utc_datetime(&date)
|
||||
.to_rfc3339()
|
||||
.into_bytes()
|
||||
};
|
||||
Extractor::get_attributes(schema)
|
||||
.get_attribute_type(attribute_name)
|
||||
.and_then(|attribute_type| {
|
||||
attributes
|
||||
.iter()
|
||||
.find(|a| &a.name == attribute_name)
|
||||
.map(|attribute| match attribute_type {
|
||||
(AttributeType::String, false) => {
|
||||
vec![attribute.value.unwrap::<String>().into_bytes()]
|
||||
}
|
||||
(AttributeType::Integer, false) => {
|
||||
// LDAP integers are encoded as strings.
|
||||
vec![attribute.value.unwrap::<i64>().to_string().into_bytes()]
|
||||
}
|
||||
(AttributeType::JpegPhoto, false) => {
|
||||
vec![attribute.value.unwrap::<JpegPhoto>().into_bytes()]
|
||||
}
|
||||
(AttributeType::DateTime, false) => {
|
||||
vec![convert_date(attribute.value.unwrap::<NaiveDateTime>())]
|
||||
}
|
||||
(AttributeType::String, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<String>>()
|
||||
.into_iter()
|
||||
.map(String::into_bytes)
|
||||
.collect(),
|
||||
(AttributeType::Integer, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<i64>>()
|
||||
.into_iter()
|
||||
.map(|i| i.to_string())
|
||||
.map(String::into_bytes)
|
||||
.collect(),
|
||||
(AttributeType::JpegPhoto, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<JpegPhoto>>()
|
||||
.into_iter()
|
||||
.map(JpegPhoto::into_bytes)
|
||||
.collect(),
|
||||
(AttributeType::DateTime, true) => attribute
|
||||
.value
|
||||
.unwrap::<Vec<NaiveDateTime>>()
|
||||
.into_iter()
|
||||
.map(convert_date)
|
||||
.collect(),
|
||||
})
|
||||
})
|
||||
pub ignored_user_attributes: Vec<String>,
|
||||
pub ignored_group_attributes: Vec<String>,
|
||||
}
|
||||
|
||||
@@ -1,15 +1,12 @@
|
||||
pub mod deserialize;
|
||||
pub mod error;
|
||||
pub mod handler;
|
||||
pub mod ldap;
|
||||
pub mod model;
|
||||
pub mod opaque_handler;
|
||||
pub mod schema;
|
||||
pub mod sql_backend_handler;
|
||||
pub mod sql_group_backend_handler;
|
||||
pub mod sql_migrations;
|
||||
pub mod sql_opaque_handler;
|
||||
pub mod sql_schema_backend_handler;
|
||||
pub mod sql_tables;
|
||||
pub mod sql_user_backend_handler;
|
||||
pub mod types;
|
||||
|
||||
@@ -1,55 +0,0 @@
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::{
|
||||
handler::AttributeSchema,
|
||||
types::{AttributeName, AttributeType},
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "group_attribute_schema")]
|
||||
pub struct Model {
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "group_attribute_schema_name"
|
||||
)]
|
||||
pub attribute_name: AttributeName,
|
||||
#[sea_orm(column_name = "group_attribute_schema_type")]
|
||||
pub attribute_type: AttributeType,
|
||||
#[sea_orm(column_name = "group_attribute_schema_is_list")]
|
||||
pub is_list: bool,
|
||||
#[sea_orm(column_name = "group_attribute_schema_is_group_visible")]
|
||||
pub is_group_visible: bool,
|
||||
#[sea_orm(column_name = "group_attribute_schema_is_group_editable")]
|
||||
pub is_group_editable: bool,
|
||||
#[sea_orm(column_name = "group_attribute_schema_is_hardcoded")]
|
||||
pub is_hardcoded: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_many = "super::group_attributes::Entity")]
|
||||
GroupAttributes,
|
||||
}
|
||||
|
||||
impl Related<super::GroupAttributes> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::GroupAttributes.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
impl From<Model> for AttributeSchema {
|
||||
fn from(value: Model) -> Self {
|
||||
Self {
|
||||
name: value.attribute_name,
|
||||
attribute_type: value.attribute_type,
|
||||
is_list: value.is_list,
|
||||
is_visible: value.is_group_visible,
|
||||
is_editable: value.is_group_editable,
|
||||
is_hardcoded: value.is_hardcoded,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,72 +0,0 @@
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::types::{AttributeName, AttributeValue, GroupId, Serialized};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "group_attributes")]
|
||||
pub struct Model {
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "group_attribute_group_id"
|
||||
)]
|
||||
pub group_id: GroupId,
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "group_attribute_name"
|
||||
)]
|
||||
pub attribute_name: AttributeName,
|
||||
#[sea_orm(column_name = "group_attribute_value")]
|
||||
pub value: Serialized,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::groups::Entity",
|
||||
from = "Column::GroupId",
|
||||
to = "super::groups::Column::GroupId",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
Groups,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::group_attribute_schema::Entity",
|
||||
from = "Column::AttributeName",
|
||||
to = "super::group_attribute_schema::Column::AttributeName",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
GroupAttributeSchema,
|
||||
}
|
||||
|
||||
impl Related<super::Group> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Groups.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::GroupAttributeSchema> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::GroupAttributeSchema.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
impl From<Model> for AttributeValue {
|
||||
fn from(
|
||||
Model {
|
||||
group_id: _,
|
||||
attribute_name,
|
||||
value,
|
||||
}: Model,
|
||||
) -> Self {
|
||||
Self {
|
||||
name: attribute_name,
|
||||
value,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,15 +3,14 @@
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::types::{GroupId, GroupName, Uuid};
|
||||
use crate::domain::types::{GroupId, Uuid};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "groups")]
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key, auto_increment = false)]
|
||||
pub group_id: GroupId,
|
||||
pub display_name: GroupName,
|
||||
pub lowercase_display_name: String,
|
||||
pub display_name: String,
|
||||
pub creation_date: chrono::NaiveDateTime,
|
||||
pub uuid: Uuid,
|
||||
}
|
||||
@@ -38,7 +37,6 @@ impl From<Model> for crate::domain::types::Group {
|
||||
creation_date: group.creation_date,
|
||||
uuid: group.uuid,
|
||||
users: vec![],
|
||||
attributes: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -50,7 +48,6 @@ impl From<Model> for crate::domain::types::GroupDetails {
|
||||
display_name: group.display_name,
|
||||
creation_date: group.creation_date,
|
||||
uuid: group.uuid,
|
||||
attributes: Vec::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -9,10 +9,4 @@ pub mod memberships;
|
||||
pub mod password_reset_tokens;
|
||||
pub mod users;
|
||||
|
||||
pub mod user_attribute_schema;
|
||||
pub mod user_attributes;
|
||||
|
||||
pub mod group_attribute_schema;
|
||||
pub mod group_attributes;
|
||||
|
||||
pub use prelude::*;
|
||||
|
||||
@@ -1,9 +1,5 @@
|
||||
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.10.3
|
||||
|
||||
pub use super::group_attribute_schema::Column as GroupAttributeSchemaColumn;
|
||||
pub use super::group_attribute_schema::Entity as GroupAttributeSchema;
|
||||
pub use super::group_attributes::Column as GroupAttributesColumn;
|
||||
pub use super::group_attributes::Entity as GroupAttributes;
|
||||
pub use super::groups::Column as GroupColumn;
|
||||
pub use super::groups::Entity as Group;
|
||||
pub use super::jwt_refresh_storage::Column as JwtRefreshStorageColumn;
|
||||
@@ -14,9 +10,5 @@ pub use super::memberships::Column as MembershipColumn;
|
||||
pub use super::memberships::Entity as Membership;
|
||||
pub use super::password_reset_tokens::Column as PasswordResetTokensColumn;
|
||||
pub use super::password_reset_tokens::Entity as PasswordResetTokens;
|
||||
pub use super::user_attribute_schema::Column as UserAttributeSchemaColumn;
|
||||
pub use super::user_attribute_schema::Entity as UserAttributeSchema;
|
||||
pub use super::user_attributes::Column as UserAttributesColumn;
|
||||
pub use super::user_attributes::Entity as UserAttributes;
|
||||
pub use super::users::Column as UserColumn;
|
||||
pub use super::users::Entity as User;
|
||||
|
||||
@@ -1,55 +0,0 @@
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::{
|
||||
handler::AttributeSchema,
|
||||
types::{AttributeName, AttributeType},
|
||||
};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "user_attribute_schema")]
|
||||
pub struct Model {
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "user_attribute_schema_name"
|
||||
)]
|
||||
pub attribute_name: AttributeName,
|
||||
#[sea_orm(column_name = "user_attribute_schema_type")]
|
||||
pub attribute_type: AttributeType,
|
||||
#[sea_orm(column_name = "user_attribute_schema_is_list")]
|
||||
pub is_list: bool,
|
||||
#[sea_orm(column_name = "user_attribute_schema_is_user_visible")]
|
||||
pub is_user_visible: bool,
|
||||
#[sea_orm(column_name = "user_attribute_schema_is_user_editable")]
|
||||
pub is_user_editable: bool,
|
||||
#[sea_orm(column_name = "user_attribute_schema_is_hardcoded")]
|
||||
pub is_hardcoded: bool,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(has_many = "super::user_attributes::Entity")]
|
||||
UserAttributes,
|
||||
}
|
||||
|
||||
impl Related<super::UserAttributes> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::UserAttributes.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
impl From<Model> for AttributeSchema {
|
||||
fn from(value: Model) -> Self {
|
||||
Self {
|
||||
name: value.attribute_name,
|
||||
attribute_type: value.attribute_type,
|
||||
is_list: value.is_list,
|
||||
is_visible: value.is_user_visible,
|
||||
is_editable: value.is_user_editable,
|
||||
is_hardcoded: value.is_hardcoded,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,72 +0,0 @@
|
||||
use sea_orm::entity::prelude::*;
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::types::{AttributeName, AttributeValue, Serialized, UserId};
|
||||
|
||||
#[derive(Clone, Debug, PartialEq, DeriveEntityModel, Eq, Serialize, Deserialize)]
|
||||
#[sea_orm(table_name = "user_attributes")]
|
||||
pub struct Model {
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "user_attribute_user_id"
|
||||
)]
|
||||
pub user_id: UserId,
|
||||
#[sea_orm(
|
||||
primary_key,
|
||||
auto_increment = false,
|
||||
column_name = "user_attribute_name"
|
||||
)]
|
||||
pub attribute_name: AttributeName,
|
||||
#[sea_orm(column_name = "user_attribute_value")]
|
||||
pub value: Serialized,
|
||||
}
|
||||
|
||||
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
|
||||
pub enum Relation {
|
||||
#[sea_orm(
|
||||
belongs_to = "super::users::Entity",
|
||||
from = "Column::UserId",
|
||||
to = "super::users::Column::UserId",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
Users,
|
||||
#[sea_orm(
|
||||
belongs_to = "super::user_attribute_schema::Entity",
|
||||
from = "Column::AttributeName",
|
||||
to = "super::user_attribute_schema::Column::AttributeName",
|
||||
on_update = "Cascade",
|
||||
on_delete = "Cascade"
|
||||
)]
|
||||
UserAttributeSchema,
|
||||
}
|
||||
|
||||
impl Related<super::User> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::Users.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl Related<super::UserAttributeSchema> for Entity {
|
||||
fn to() -> RelationDef {
|
||||
Relation::UserAttributeSchema.def()
|
||||
}
|
||||
}
|
||||
|
||||
impl ActiveModelBehavior for ActiveModel {}
|
||||
|
||||
impl From<Model> for AttributeValue {
|
||||
fn from(
|
||||
Model {
|
||||
user_id: _,
|
||||
attribute_name,
|
||||
value,
|
||||
}: Model,
|
||||
) -> Self {
|
||||
Self {
|
||||
name: attribute_name,
|
||||
value,
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -3,7 +3,7 @@
|
||||
use sea_orm::{entity::prelude::*, sea_query::BlobSize};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::domain::types::{Email, UserId, Uuid};
|
||||
use crate::domain::types::{JpegPhoto, UserId, Uuid};
|
||||
|
||||
#[derive(Copy, Clone, Default, Debug, DeriveEntity)]
|
||||
pub struct Entity;
|
||||
@@ -13,9 +13,11 @@ pub struct Entity;
|
||||
pub struct Model {
|
||||
#[sea_orm(primary_key, auto_increment = false)]
|
||||
pub user_id: UserId,
|
||||
pub email: Email,
|
||||
pub lowercase_email: String,
|
||||
pub email: String,
|
||||
pub display_name: Option<String>,
|
||||
pub first_name: Option<String>,
|
||||
pub last_name: Option<String>,
|
||||
pub avatar: Option<JpegPhoto>,
|
||||
pub creation_date: chrono::NaiveDateTime,
|
||||
pub password_hash: Option<Vec<u8>>,
|
||||
pub totp_secret: Option<String>,
|
||||
@@ -33,8 +35,10 @@ impl EntityName for Entity {
|
||||
pub enum Column {
|
||||
UserId,
|
||||
Email,
|
||||
LowercaseEmail,
|
||||
DisplayName,
|
||||
FirstName,
|
||||
LastName,
|
||||
Avatar,
|
||||
CreationDate,
|
||||
PasswordHash,
|
||||
TotpSecret,
|
||||
@@ -49,8 +53,10 @@ impl ColumnTrait for Column {
|
||||
match self {
|
||||
Column::UserId => ColumnType::String(Some(255)),
|
||||
Column::Email => ColumnType::String(Some(255)),
|
||||
Column::LowercaseEmail => ColumnType::String(Some(255)),
|
||||
Column::DisplayName => ColumnType::String(Some(255)),
|
||||
Column::FirstName => ColumnType::String(Some(255)),
|
||||
Column::LastName => ColumnType::String(Some(255)),
|
||||
Column::Avatar => ColumnType::Binary(BlobSize::Long),
|
||||
Column::CreationDate => ColumnType::DateTime,
|
||||
Column::PasswordHash => ColumnType::Binary(BlobSize::Medium),
|
||||
Column::TotpSecret => ColumnType::String(Some(64)),
|
||||
@@ -118,9 +124,11 @@ impl From<Model> for crate::domain::types::User {
|
||||
user_id: user.user_id,
|
||||
email: user.email,
|
||||
display_name: user.display_name,
|
||||
first_name: user.first_name,
|
||||
last_name: user.last_name,
|
||||
creation_date: user.creation_date,
|
||||
uuid: user.uuid,
|
||||
attributes: Vec::new(),
|
||||
avatar: user.avatar,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,124 +0,0 @@
|
||||
use crate::domain::{
|
||||
handler::{AttributeList, AttributeSchema, Schema},
|
||||
types::AttributeType,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize)]
|
||||
pub struct PublicSchema(Schema);
|
||||
|
||||
impl PublicSchema {
|
||||
pub fn get_schema(&self) -> &Schema {
|
||||
&self.0
|
||||
}
|
||||
}
|
||||
|
||||
pub trait SchemaAttributeExtractor: std::marker::Send {
|
||||
fn get_attributes(schema: &PublicSchema) -> &AttributeList;
|
||||
}
|
||||
|
||||
pub struct SchemaUserAttributeExtractor;
|
||||
|
||||
impl SchemaAttributeExtractor for SchemaUserAttributeExtractor {
|
||||
fn get_attributes(schema: &PublicSchema) -> &AttributeList {
|
||||
&schema.get_schema().user_attributes
|
||||
}
|
||||
}
|
||||
|
||||
pub struct SchemaGroupAttributeExtractor;
|
||||
|
||||
impl SchemaAttributeExtractor for SchemaGroupAttributeExtractor {
|
||||
fn get_attributes(schema: &PublicSchema) -> &AttributeList {
|
||||
&schema.get_schema().group_attributes
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Schema> for PublicSchema {
|
||||
fn from(mut schema: Schema) -> Self {
|
||||
schema.user_attributes.attributes.extend_from_slice(&[
|
||||
AttributeSchema {
|
||||
name: "user_id".into(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "creation_date".into(),
|
||||
attribute_type: AttributeType::DateTime,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "mail".into(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "uuid".into(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "display_name".into(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
]);
|
||||
schema
|
||||
.user_attributes
|
||||
.attributes
|
||||
.sort_by(|a, b| a.name.cmp(&b.name));
|
||||
schema.group_attributes.attributes.extend_from_slice(&[
|
||||
AttributeSchema {
|
||||
name: "group_id".into(),
|
||||
attribute_type: AttributeType::Integer,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "creation_date".into(),
|
||||
attribute_type: AttributeType::DateTime,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "uuid".into(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: false,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
AttributeSchema {
|
||||
name: "display_name".into(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
is_hardcoded: true,
|
||||
},
|
||||
]);
|
||||
schema
|
||||
.group_attributes
|
||||
.attributes
|
||||
.sort_by(|a, b| a.name.cmp(&b.name));
|
||||
PublicSchema(schema)
|
||||
}
|
||||
}
|
||||
@@ -23,7 +23,7 @@ pub mod tests {
|
||||
use crate::{
|
||||
domain::{
|
||||
handler::{
|
||||
CreateGroupRequest, CreateUserRequest, GroupBackendHandler, UserBackendHandler,
|
||||
CreateUserRequest, GroupBackendHandler, UserBackendHandler,
|
||||
UserListerBackendHandler, UserRequestFilter,
|
||||
},
|
||||
sql_tables::init_table,
|
||||
@@ -32,7 +32,6 @@ pub mod tests {
|
||||
infra::configuration::ConfigurationBuilder,
|
||||
};
|
||||
use lldap_auth::{opaque, registration};
|
||||
use pretty_assertions::assert_eq;
|
||||
use sea_orm::Database;
|
||||
|
||||
pub fn get_default_config() -> Configuration {
|
||||
@@ -60,10 +59,10 @@ pub mod tests {
|
||||
insert_user_no_password(handler, name).await;
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
let client_registration_start =
|
||||
opaque::client::registration::start_registration(pass.as_bytes(), &mut rng).unwrap();
|
||||
opaque::client::registration::start_registration(pass, &mut rng).unwrap();
|
||||
let response = handler
|
||||
.registration_start(registration::ClientRegistrationStartRequest {
|
||||
username: name.into(),
|
||||
username: name.to_string(),
|
||||
registration_start_request: client_registration_start.message,
|
||||
})
|
||||
.await
|
||||
@@ -87,7 +86,7 @@ pub mod tests {
|
||||
handler
|
||||
.create_user(CreateUserRequest {
|
||||
user_id: UserId::new(name),
|
||||
email: format!("{}@bob.bob", name).into(),
|
||||
email: format!("{}@bob.bob", name),
|
||||
display_name: Some("display ".to_string() + name),
|
||||
first_name: Some("first ".to_string() + name),
|
||||
last_name: Some("last ".to_string() + name),
|
||||
@@ -98,13 +97,7 @@ pub mod tests {
|
||||
}
|
||||
|
||||
pub async fn insert_group(handler: &SqlBackendHandler, name: &str) -> GroupId {
|
||||
handler
|
||||
.create_group(CreateGroupRequest {
|
||||
display_name: name.into(),
|
||||
..Default::default()
|
||||
})
|
||||
.await
|
||||
.unwrap()
|
||||
handler.create_group(name).await.unwrap()
|
||||
}
|
||||
|
||||
pub async fn insert_membership(handler: &SqlBackendHandler, group_id: GroupId, user_id: &str) {
|
||||
|
||||
@@ -1,33 +1,19 @@
|
||||
use crate::domain::{
|
||||
error::{DomainError, Result},
|
||||
handler::{
|
||||
CreateGroupRequest, GroupBackendHandler, GroupListerBackendHandler, GroupRequestFilter,
|
||||
UpdateGroupRequest,
|
||||
GroupBackendHandler, GroupListerBackendHandler, GroupRequestFilter, UpdateGroupRequest,
|
||||
},
|
||||
model::{self, GroupColumn, MembershipColumn},
|
||||
sql_backend_handler::SqlBackendHandler,
|
||||
types::{AttributeName, AttributeValue, Group, GroupDetails, GroupId, Serialized, Uuid},
|
||||
types::{Group, GroupDetails, GroupId, Uuid},
|
||||
};
|
||||
use async_trait::async_trait;
|
||||
use sea_orm::{
|
||||
sea_query::{Alias, Cond, Expr, Func, IntoCondition, OnConflict, SimpleExpr},
|
||||
ActiveModelTrait, ColumnTrait, DatabaseTransaction, EntityTrait, QueryFilter, QueryOrder,
|
||||
QuerySelect, QueryTrait, Set, TransactionTrait,
|
||||
sea_query::{Alias, Cond, Expr, Func, IntoCondition, SimpleExpr},
|
||||
ActiveModelTrait, ActiveValue, ColumnTrait, EntityTrait, QueryFilter, QueryOrder, QuerySelect,
|
||||
QueryTrait,
|
||||
};
|
||||
use tracing::instrument;
|
||||
|
||||
fn attribute_condition(name: AttributeName, value: Serialized) -> Cond {
|
||||
Expr::in_subquery(
|
||||
Expr::col(GroupColumn::GroupId.as_column_ref()),
|
||||
model::GroupAttributes::find()
|
||||
.select_only()
|
||||
.column(model::GroupAttributesColumn::GroupId)
|
||||
.filter(model::GroupAttributesColumn::AttributeName.eq(name))
|
||||
.filter(model::GroupAttributesColumn::Value.eq(value))
|
||||
.into_query(),
|
||||
)
|
||||
.into_condition()
|
||||
}
|
||||
use tracing::{debug, instrument};
|
||||
|
||||
fn get_group_filter_expr(filter: GroupRequestFilter) -> Cond {
|
||||
use GroupRequestFilter::*;
|
||||
@@ -50,9 +36,7 @@ fn get_group_filter_expr(filter: GroupRequestFilter) -> Cond {
|
||||
}
|
||||
}
|
||||
Not(f) => get_group_filter_expr(*f).not(),
|
||||
DisplayName(name) => GroupColumn::LowercaseDisplayName
|
||||
.eq(name.as_str().to_lowercase())
|
||||
.into_condition(),
|
||||
DisplayName(name) => GroupColumn::DisplayName.eq(name).into_condition(),
|
||||
GroupId(id) => GroupColumn::GroupId.eq(id.0).into_condition(),
|
||||
Uuid(uuid) => GroupColumn::Uuid.eq(uuid.to_string()).into_condition(),
|
||||
// WHERE (group_id in (SELECT group_id FROM memberships WHERE user_id = user))
|
||||
@@ -71,16 +55,17 @@ fn get_group_filter_expr(filter: GroupRequestFilter) -> Cond {
|
||||
))))
|
||||
.like(filter.to_sql_filter())
|
||||
.into_condition(),
|
||||
AttributeEquality(name, value) => attribute_condition(name, value),
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl GroupListerBackendHandler for SqlBackendHandler {
|
||||
#[instrument(skip(self), level = "debug", ret, err)]
|
||||
#[instrument(skip_all, level = "debug", ret, err)]
|
||||
async fn list_groups(&self, filters: Option<GroupRequestFilter>) -> Result<Vec<Group>> {
|
||||
debug!(?filters);
|
||||
let results = model::Group::find()
|
||||
.order_by_asc(GroupColumn::GroupId)
|
||||
// The order_by must be before find_with_related otherwise the primary order is by group_id.
|
||||
.order_by_asc(GroupColumn::DisplayName)
|
||||
.find_with_related(model::Membership)
|
||||
.filter(
|
||||
filters
|
||||
@@ -100,7 +85,7 @@ impl GroupListerBackendHandler for SqlBackendHandler {
|
||||
)
|
||||
.all(&self.sql_pool)
|
||||
.await?;
|
||||
let mut groups: Vec<_> = results
|
||||
Ok(results
|
||||
.into_iter()
|
||||
.map(|(group, users)| {
|
||||
let users: Vec<_> = users.into_iter().map(|u| u.user_id).collect();
|
||||
@@ -109,114 +94,54 @@ impl GroupListerBackendHandler for SqlBackendHandler {
|
||||
..group.into()
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
let group_ids = groups.iter().map(|u| &u.id);
|
||||
let attributes = model::GroupAttributes::find()
|
||||
.filter(model::GroupAttributesColumn::GroupId.is_in(group_ids))
|
||||
.order_by_asc(model::GroupAttributesColumn::GroupId)
|
||||
.order_by_asc(model::GroupAttributesColumn::AttributeName)
|
||||
.all(&self.sql_pool)
|
||||
.await?;
|
||||
let mut attributes_iter = attributes.into_iter().peekable();
|
||||
use itertools::Itertools; // For take_while_ref
|
||||
for group in groups.iter_mut() {
|
||||
assert!(attributes_iter
|
||||
.peek()
|
||||
.map(|u| u.group_id >= group.id)
|
||||
.unwrap_or(true),
|
||||
"Attributes are not sorted, groups are not sorted, or previous group didn't consume all the attributes");
|
||||
|
||||
group.attributes = attributes_iter
|
||||
.take_while_ref(|u| u.group_id == group.id)
|
||||
.map(AttributeValue::from)
|
||||
.collect();
|
||||
}
|
||||
groups.sort_by(|g1, g2| g1.display_name.cmp(&g2.display_name));
|
||||
Ok(groups)
|
||||
.collect())
|
||||
}
|
||||
}
|
||||
|
||||
#[async_trait]
|
||||
impl GroupBackendHandler for SqlBackendHandler {
|
||||
#[instrument(skip(self), level = "debug", ret, err)]
|
||||
#[instrument(skip_all, level = "debug", ret, err)]
|
||||
async fn get_group_details(&self, group_id: GroupId) -> Result<GroupDetails> {
|
||||
let mut group_details = model::Group::find_by_id(group_id)
|
||||
debug!(?group_id);
|
||||
model::Group::find_by_id(group_id)
|
||||
.into_model::<GroupDetails>()
|
||||
.one(&self.sql_pool)
|
||||
.await?
|
||||
.map(Into::<GroupDetails>::into)
|
||||
.ok_or_else(|| DomainError::EntityNotFound(format!("{:?}", group_id)))?;
|
||||
let attributes = model::GroupAttributes::find()
|
||||
.filter(model::GroupAttributesColumn::GroupId.eq(group_details.group_id))
|
||||
.order_by_asc(model::GroupAttributesColumn::AttributeName)
|
||||
.all(&self.sql_pool)
|
||||
.await?;
|
||||
group_details.attributes = attributes.into_iter().map(AttributeValue::from).collect();
|
||||
Ok(group_details)
|
||||
.ok_or_else(|| DomainError::EntityNotFound(format!("{:?}", group_id)))
|
||||
}
|
||||
|
||||
#[instrument(skip(self), level = "debug", err, fields(group_id = ?request.group_id))]
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
async fn update_group(&self, request: UpdateGroupRequest) -> Result<()> {
|
||||
Ok(self
|
||||
.sql_pool
|
||||
.transaction::<_, (), DomainError>(|transaction| {
|
||||
Box::pin(
|
||||
async move { Self::update_group_with_transaction(request, transaction).await },
|
||||
)
|
||||
})
|
||||
.await?)
|
||||
}
|
||||
|
||||
#[instrument(skip(self), level = "debug", ret, err)]
|
||||
async fn create_group(&self, request: CreateGroupRequest) -> Result<GroupId> {
|
||||
let now = chrono::Utc::now().naive_utc();
|
||||
let uuid = Uuid::from_name_and_date(request.display_name.as_str(), &now);
|
||||
let lower_display_name = request.display_name.as_str().to_lowercase();
|
||||
let new_group = model::groups::ActiveModel {
|
||||
display_name: Set(request.display_name),
|
||||
lowercase_display_name: Set(lower_display_name),
|
||||
creation_date: Set(now),
|
||||
uuid: Set(uuid),
|
||||
debug!(?request.group_id);
|
||||
let update_group = model::groups::ActiveModel {
|
||||
group_id: ActiveValue::Set(request.group_id),
|
||||
display_name: request
|
||||
.display_name
|
||||
.map(ActiveValue::Set)
|
||||
.unwrap_or_default(),
|
||||
..Default::default()
|
||||
};
|
||||
Ok(self
|
||||
.sql_pool
|
||||
.transaction::<_, GroupId, DomainError>(|transaction| {
|
||||
Box::pin(async move {
|
||||
let schema = Self::get_schema_with_transaction(transaction).await?;
|
||||
let group_id = new_group.insert(transaction).await?.group_id;
|
||||
let mut new_group_attributes = Vec::new();
|
||||
for attribute in request.attributes {
|
||||
if schema
|
||||
.group_attributes
|
||||
.get_attribute_type(&attribute.name)
|
||||
.is_some()
|
||||
{
|
||||
new_group_attributes.push(model::group_attributes::ActiveModel {
|
||||
group_id: Set(group_id),
|
||||
attribute_name: Set(attribute.name),
|
||||
value: Set(attribute.value),
|
||||
});
|
||||
} else {
|
||||
return Err(DomainError::InternalError(format!(
|
||||
"Attribute name {} doesn't exist in the group schema,
|
||||
yet was attempted to be inserted in the database",
|
||||
&attribute.name
|
||||
)));
|
||||
}
|
||||
}
|
||||
if !new_group_attributes.is_empty() {
|
||||
model::GroupAttributes::insert_many(new_group_attributes)
|
||||
.exec(transaction)
|
||||
.await?;
|
||||
}
|
||||
Ok(group_id)
|
||||
})
|
||||
})
|
||||
.await?)
|
||||
update_group.update(&self.sql_pool).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[instrument(skip(self), level = "debug", err)]
|
||||
#[instrument(skip_all, level = "debug", ret, err)]
|
||||
async fn create_group(&self, group_name: &str) -> Result<GroupId> {
|
||||
debug!(?group_name);
|
||||
let now = chrono::Utc::now().naive_utc();
|
||||
let uuid = Uuid::from_name_and_date(group_name, &now);
|
||||
let new_group = model::groups::ActiveModel {
|
||||
display_name: ActiveValue::Set(group_name.to_owned()),
|
||||
creation_date: ActiveValue::Set(now),
|
||||
uuid: ActiveValue::Set(uuid),
|
||||
..Default::default()
|
||||
};
|
||||
Ok(new_group.insert(&self.sql_pool).await?.group_id)
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
async fn delete_group(&self, group_id: GroupId) -> Result<()> {
|
||||
debug!(?group_id);
|
||||
let res = model::Group::delete_by_id(group_id)
|
||||
.exec(&self.sql_pool)
|
||||
.await?;
|
||||
@@ -230,90 +155,10 @@ impl GroupBackendHandler for SqlBackendHandler {
|
||||
}
|
||||
}
|
||||
|
||||
impl SqlBackendHandler {
|
||||
async fn update_group_with_transaction(
|
||||
request: UpdateGroupRequest,
|
||||
transaction: &DatabaseTransaction,
|
||||
) -> Result<()> {
|
||||
let lower_display_name = request
|
||||
.display_name
|
||||
.as_ref()
|
||||
.map(|s| s.as_str().to_lowercase());
|
||||
let update_group = model::groups::ActiveModel {
|
||||
group_id: Set(request.group_id),
|
||||
display_name: request.display_name.map(Set).unwrap_or_default(),
|
||||
lowercase_display_name: lower_display_name.map(Set).unwrap_or_default(),
|
||||
..Default::default()
|
||||
};
|
||||
update_group.update(transaction).await?;
|
||||
let mut update_group_attributes = Vec::new();
|
||||
let mut remove_group_attributes = Vec::new();
|
||||
let schema = Self::get_schema_with_transaction(transaction).await?;
|
||||
for attribute in request.insert_attributes {
|
||||
if schema
|
||||
.group_attributes
|
||||
.get_attribute_type(&attribute.name)
|
||||
.is_some()
|
||||
{
|
||||
update_group_attributes.push(model::group_attributes::ActiveModel {
|
||||
group_id: Set(request.group_id),
|
||||
attribute_name: Set(attribute.name.to_owned()),
|
||||
value: Set(attribute.value),
|
||||
});
|
||||
} else {
|
||||
return Err(DomainError::InternalError(format!(
|
||||
"Group attribute name {} doesn't exist in the schema, yet was attempted to be inserted in the database",
|
||||
&attribute.name
|
||||
)));
|
||||
}
|
||||
}
|
||||
for attribute in request.delete_attributes {
|
||||
if schema
|
||||
.group_attributes
|
||||
.get_attribute_type(&attribute)
|
||||
.is_some()
|
||||
{
|
||||
remove_group_attributes.push(attribute);
|
||||
} else {
|
||||
return Err(DomainError::InternalError(format!(
|
||||
"Group attribute name {} doesn't exist in the schema, yet was attempted to be removed from the database",
|
||||
attribute
|
||||
)));
|
||||
}
|
||||
}
|
||||
if !remove_group_attributes.is_empty() {
|
||||
model::GroupAttributes::delete_many()
|
||||
.filter(model::GroupAttributesColumn::GroupId.eq(request.group_id))
|
||||
.filter(model::GroupAttributesColumn::AttributeName.is_in(remove_group_attributes))
|
||||
.exec(transaction)
|
||||
.await?;
|
||||
}
|
||||
if !update_group_attributes.is_empty() {
|
||||
model::GroupAttributes::insert_many(update_group_attributes)
|
||||
.on_conflict(
|
||||
OnConflict::columns([
|
||||
model::GroupAttributesColumn::GroupId,
|
||||
model::GroupAttributesColumn::AttributeName,
|
||||
])
|
||||
.update_column(model::GroupAttributesColumn::Value)
|
||||
.to_owned(),
|
||||
)
|
||||
.exec(transaction)
|
||||
.await?;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::domain::{
|
||||
handler::{CreateAttributeRequest, SchemaBackendHandler, SubStringFilter},
|
||||
sql_backend_handler::tests::*,
|
||||
types::{AttributeType, GroupName, Serialized, UserId},
|
||||
};
|
||||
use pretty_assertions::assert_eq;
|
||||
use crate::domain::{handler::SubStringFilter, sql_backend_handler::tests::*, types::UserId};
|
||||
|
||||
async fn get_group_ids(
|
||||
handler: &SqlBackendHandler,
|
||||
@@ -331,7 +176,7 @@ mod tests {
|
||||
async fn get_group_names(
|
||||
handler: &SqlBackendHandler,
|
||||
filters: Option<GroupRequestFilter>,
|
||||
) -> Vec<GroupName> {
|
||||
) -> Vec<String> {
|
||||
handler
|
||||
.list_groups(filters)
|
||||
.await
|
||||
@@ -347,9 +192,9 @@ mod tests {
|
||||
assert_eq!(
|
||||
get_group_names(&fixture.handler, None).await,
|
||||
vec![
|
||||
"Best Group".into(),
|
||||
"Empty Group".into(),
|
||||
"Worst Group".into()
|
||||
"Best Group".to_owned(),
|
||||
"Empty Group".to_owned(),
|
||||
"Worst Group".to_owned()
|
||||
]
|
||||
);
|
||||
}
|
||||
@@ -361,25 +206,12 @@ mod tests {
|
||||
get_group_names(
|
||||
&fixture.handler,
|
||||
Some(GroupRequestFilter::Or(vec![
|
||||
GroupRequestFilter::DisplayName("Empty Group".into()),
|
||||
GroupRequestFilter::DisplayName("Empty Group".to_owned()),
|
||||
GroupRequestFilter::Member(UserId::new("bob")),
|
||||
]))
|
||||
)
|
||||
.await,
|
||||
vec!["Best Group".into(), "Empty Group".into()]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_list_groups_case_insensitive_filter() {
|
||||
let fixture = TestFixture::new().await;
|
||||
assert_eq!(
|
||||
get_group_names(
|
||||
&fixture.handler,
|
||||
Some(GroupRequestFilter::DisplayName("eMpTy gRoup".into()),)
|
||||
)
|
||||
.await,
|
||||
vec!["Empty Group".into()]
|
||||
vec!["Best Group".to_owned(), "Empty Group".to_owned()]
|
||||
);
|
||||
}
|
||||
|
||||
@@ -391,7 +223,7 @@ mod tests {
|
||||
&fixture.handler,
|
||||
Some(GroupRequestFilter::And(vec![
|
||||
GroupRequestFilter::Not(Box::new(GroupRequestFilter::DisplayName(
|
||||
"value".into()
|
||||
"value".to_owned()
|
||||
))),
|
||||
GroupRequestFilter::GroupId(fixture.groups[0]),
|
||||
]))
|
||||
@@ -419,46 +251,6 @@ mod tests {
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_list_groups_other_filter() {
|
||||
let fixture = TestFixture::new().await;
|
||||
fixture
|
||||
.handler
|
||||
.add_group_attribute(CreateAttributeRequest {
|
||||
name: "gid".into(),
|
||||
attribute_type: AttributeType::Integer,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
fixture
|
||||
.handler
|
||||
.update_group(UpdateGroupRequest {
|
||||
group_id: fixture.groups[0],
|
||||
display_name: None,
|
||||
delete_attributes: Vec::new(),
|
||||
insert_attributes: vec![AttributeValue {
|
||||
name: "gid".into(),
|
||||
value: Serialized::from(&512),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(
|
||||
get_group_ids(
|
||||
&fixture.handler,
|
||||
Some(GroupRequestFilter::AttributeEquality(
|
||||
AttributeName::from("gid"),
|
||||
Serialized::from(&512),
|
||||
)),
|
||||
)
|
||||
.await,
|
||||
vec![fixture.groups[0]]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_get_group_details() {
|
||||
let fixture = TestFixture::new().await;
|
||||
@@ -468,7 +260,7 @@ mod tests {
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(details.group_id, fixture.groups[0]);
|
||||
assert_eq!(details.display_name, "Best Group".into());
|
||||
assert_eq!(details.display_name, "Best Group");
|
||||
assert_eq!(
|
||||
get_group_ids(
|
||||
&fixture.handler,
|
||||
@@ -486,9 +278,7 @@ mod tests {
|
||||
.handler
|
||||
.update_group(UpdateGroupRequest {
|
||||
group_id: fixture.groups[0],
|
||||
display_name: Some("Awesomest Group".into()),
|
||||
delete_attributes: Vec::new(),
|
||||
insert_attributes: Vec::new(),
|
||||
display_name: Some("Awesomest Group".to_owned()),
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
@@ -497,7 +287,7 @@ mod tests {
|
||||
.get_group_details(fixture.groups[0])
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(details.display_name, "Awesomest Group".into());
|
||||
assert_eq!(details.display_name, "Awesomest Group");
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
@@ -517,93 +307,4 @@ mod tests {
|
||||
vec![fixture.groups[2], fixture.groups[1]]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_create_group() {
|
||||
let fixture = TestFixture::new().await;
|
||||
assert_eq!(
|
||||
get_group_ids(&fixture.handler, None).await,
|
||||
vec![fixture.groups[0], fixture.groups[2], fixture.groups[1]]
|
||||
);
|
||||
fixture
|
||||
.handler
|
||||
.add_group_attribute(CreateAttributeRequest {
|
||||
name: "new_attribute".into(),
|
||||
attribute_type: AttributeType::String,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let new_group_id = fixture
|
||||
.handler
|
||||
.create_group(CreateGroupRequest {
|
||||
display_name: "New Group".into(),
|
||||
attributes: vec![AttributeValue {
|
||||
name: "new_attribute".into(),
|
||||
value: Serialized::from("value"),
|
||||
}],
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let group_details = fixture
|
||||
.handler
|
||||
.get_group_details(new_group_id)
|
||||
.await
|
||||
.unwrap();
|
||||
assert_eq!(group_details.display_name, "New Group".into());
|
||||
assert_eq!(
|
||||
group_details.attributes,
|
||||
vec![AttributeValue {
|
||||
name: "new_attribute".into(),
|
||||
value: Serialized::from("value"),
|
||||
}]
|
||||
);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_set_group_attributes() {
|
||||
let fixture = TestFixture::new().await;
|
||||
fixture
|
||||
.handler
|
||||
.add_group_attribute(CreateAttributeRequest {
|
||||
name: "new_attribute".into(),
|
||||
attribute_type: AttributeType::Integer,
|
||||
is_list: false,
|
||||
is_visible: true,
|
||||
is_editable: true,
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let group_id = fixture.groups[0];
|
||||
let attributes = vec![AttributeValue {
|
||||
name: "new_attribute".into(),
|
||||
value: Serialized::from(&42i64),
|
||||
}];
|
||||
fixture
|
||||
.handler
|
||||
.update_group(UpdateGroupRequest {
|
||||
group_id,
|
||||
display_name: None,
|
||||
delete_attributes: Vec::new(),
|
||||
insert_attributes: attributes.clone(),
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let details = fixture.handler.get_group_details(group_id).await.unwrap();
|
||||
assert_eq!(details.attributes, attributes);
|
||||
fixture
|
||||
.handler
|
||||
.update_group(UpdateGroupRequest {
|
||||
group_id,
|
||||
display_name: None,
|
||||
delete_attributes: vec!["new_attribute".into()],
|
||||
insert_attributes: Vec::new(),
|
||||
})
|
||||
.await
|
||||
.unwrap();
|
||||
let details = fixture.handler.get_group_details(group_id).await.unwrap();
|
||||
assert_eq!(details.attributes, Vec::new());
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,25 +1,23 @@
|
||||
use crate::domain::{
|
||||
sql_tables::{DbConnection, SchemaVersion, LAST_SCHEMA_VERSION},
|
||||
types::{AttributeType, GroupId, JpegPhoto, Serialized, UserId, Uuid},
|
||||
sql_tables::{DbConnection, SchemaVersion},
|
||||
types::{GroupId, UserId, Uuid},
|
||||
};
|
||||
use anyhow::Context;
|
||||
use itertools::Itertools;
|
||||
use sea_orm::{
|
||||
sea_query::{
|
||||
self, all, Alias, BinOper, BlobSize::Blob, ColumnDef, Expr, ForeignKey, ForeignKeyAction,
|
||||
Func, Index, Query, SimpleExpr, Table, Value,
|
||||
self, all, ColumnDef, Expr, ForeignKey, ForeignKeyAction, Func, Index, Query, Table, Value,
|
||||
},
|
||||
ConnectionTrait, DatabaseTransaction, DbErr, DeriveIden, FromQueryResult, Iden, Order,
|
||||
Statement, TransactionTrait,
|
||||
ConnectionTrait, FromQueryResult, Iden, Order, Statement, TransactionTrait,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tracing::{error, info, instrument, warn};
|
||||
use tracing::{info, instrument, warn};
|
||||
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
#[derive(Iden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum Users {
|
||||
Table,
|
||||
UserId,
|
||||
Email,
|
||||
LowercaseEmail,
|
||||
DisplayName,
|
||||
FirstName,
|
||||
LastName,
|
||||
@@ -31,71 +29,28 @@ pub enum Users {
|
||||
Uuid,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
#[derive(Iden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum Groups {
|
||||
Table,
|
||||
GroupId,
|
||||
DisplayName,
|
||||
LowercaseDisplayName,
|
||||
CreationDate,
|
||||
Uuid,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden, Clone, Copy)]
|
||||
#[derive(Iden, Clone, Copy)]
|
||||
pub enum Memberships {
|
||||
Table,
|
||||
UserId,
|
||||
GroupId,
|
||||
}
|
||||
|
||||
#[allow(clippy::enum_variant_names)] // The table names are generated from the enum.
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum UserAttributeSchema {
|
||||
Table,
|
||||
UserAttributeSchemaName,
|
||||
UserAttributeSchemaType,
|
||||
UserAttributeSchemaIsList,
|
||||
UserAttributeSchemaIsUserVisible,
|
||||
UserAttributeSchemaIsUserEditable,
|
||||
UserAttributeSchemaIsHardcoded,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum UserAttributes {
|
||||
Table,
|
||||
UserAttributeUserId,
|
||||
UserAttributeName,
|
||||
UserAttributeValue,
|
||||
}
|
||||
|
||||
#[allow(clippy::enum_variant_names)] // The table names are generated from the enum.
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum GroupAttributeSchema {
|
||||
Table,
|
||||
GroupAttributeSchemaName,
|
||||
GroupAttributeSchemaType,
|
||||
GroupAttributeSchemaIsList,
|
||||
GroupAttributeSchemaIsGroupVisible,
|
||||
GroupAttributeSchemaIsGroupEditable,
|
||||
GroupAttributeSchemaIsHardcoded,
|
||||
}
|
||||
|
||||
#[derive(DeriveIden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Copy)]
|
||||
pub enum GroupAttributes {
|
||||
Table,
|
||||
GroupAttributeGroupId,
|
||||
GroupAttributeName,
|
||||
GroupAttributeValue,
|
||||
}
|
||||
|
||||
// Metadata about the SQL DB.
|
||||
#[derive(DeriveIden)]
|
||||
#[derive(Iden)]
|
||||
pub enum Metadata {
|
||||
Table,
|
||||
// Which version of the schema we're at.
|
||||
Version,
|
||||
PrivateKeyHash,
|
||||
PrivateKeyLocation,
|
||||
}
|
||||
|
||||
#[derive(FromQueryResult, PartialEq, Eq, Debug)]
|
||||
@@ -382,64 +337,72 @@ pub async fn upgrade_to_v1(pool: &DbConnection) -> std::result::Result<(), sea_o
|
||||
}
|
||||
|
||||
async fn replace_column<I: Iden + Copy + 'static, const N: usize>(
|
||||
transaction: DatabaseTransaction,
|
||||
pool: &DbConnection,
|
||||
table_name: I,
|
||||
column_name: I,
|
||||
mut new_column: ColumnDef,
|
||||
update_values: [Statement; N],
|
||||
) -> Result<DatabaseTransaction, DbErr> {
|
||||
) -> anyhow::Result<()> {
|
||||
// Update the definition of a column (in a compatible way). Due to Sqlite, this is more complicated:
|
||||
// - rename the column to a temporary name
|
||||
// - create the column with the new definition
|
||||
// - copy the data from the temp column to the new one
|
||||
// - update the new one if there are changes needed
|
||||
// - drop the old one
|
||||
let builder = transaction.get_database_backend();
|
||||
#[derive(DeriveIden)]
|
||||
enum TempTable {
|
||||
TempName,
|
||||
}
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter()
|
||||
.table(table_name)
|
||||
.rename_column(column_name, TempTable::TempName),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
transaction
|
||||
.execute(builder.build(Table::alter().table(table_name).add_column(&mut new_column)))
|
||||
.await?;
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Query::update()
|
||||
.table(table_name)
|
||||
.value(column_name, Expr::col((table_name, TempTable::TempName))),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
for statement in update_values {
|
||||
transaction.execute(statement).await?;
|
||||
}
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter()
|
||||
.table(table_name)
|
||||
.drop_column(TempTable::TempName),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
Ok(transaction)
|
||||
let builder = pool.get_database_backend();
|
||||
pool.transaction::<_, (), sea_orm::DbErr>(move |transaction| {
|
||||
Box::pin(async move {
|
||||
#[derive(Iden)]
|
||||
enum TempTable {
|
||||
TempName,
|
||||
}
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter()
|
||||
.table(table_name)
|
||||
.rename_column(column_name, TempTable::TempName),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(Table::alter().table(table_name).add_column(&mut new_column)),
|
||||
)
|
||||
.await?;
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Query::update()
|
||||
.table(table_name)
|
||||
.value(column_name, Expr::col((table_name, TempTable::TempName))),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
for statement in update_values {
|
||||
transaction.execute(statement).await?;
|
||||
}
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter()
|
||||
.table(table_name)
|
||||
.drop_column(TempTable::TempName),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
})
|
||||
})
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn migrate_to_v2(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
async fn migrate_to_v2(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
let builder = pool.get_database_backend();
|
||||
// Allow nulls in DisplayName, and change empty string to null.
|
||||
let transaction = replace_column(
|
||||
transaction,
|
||||
replace_column(
|
||||
pool,
|
||||
Users::Table,
|
||||
Users::DisplayName,
|
||||
ColumnDef::new(Users::DisplayName)
|
||||
@@ -453,14 +416,14 @@ async fn migrate_to_v2(transaction: DatabaseTransaction) -> Result<DatabaseTrans
|
||||
)],
|
||||
)
|
||||
.await?;
|
||||
Ok(transaction)
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn migrate_to_v3(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
async fn migrate_to_v3(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
let builder = pool.get_database_backend();
|
||||
// Allow nulls in First and LastName. Users who created their DB in 0.4.1 have the not null constraint.
|
||||
let transaction = replace_column(
|
||||
transaction,
|
||||
replace_column(
|
||||
pool,
|
||||
Users::Table,
|
||||
Users::FirstName,
|
||||
ColumnDef::new(Users::FirstName).string_len(255).to_owned(),
|
||||
@@ -472,8 +435,8 @@ async fn migrate_to_v3(transaction: DatabaseTransaction) -> Result<DatabaseTrans
|
||||
)],
|
||||
)
|
||||
.await?;
|
||||
let transaction = replace_column(
|
||||
transaction,
|
||||
replace_column(
|
||||
pool,
|
||||
Users::Table,
|
||||
Users::LastName,
|
||||
ColumnDef::new(Users::LastName).string_len(255).to_owned(),
|
||||
@@ -486,8 +449,8 @@ async fn migrate_to_v3(transaction: DatabaseTransaction) -> Result<DatabaseTrans
|
||||
)
|
||||
.await?;
|
||||
// Change Avatar from binary to blob(long), because for MySQL this is 64kb.
|
||||
let transaction = replace_column(
|
||||
transaction,
|
||||
replace_column(
|
||||
pool,
|
||||
Users::Table,
|
||||
Users::Avatar,
|
||||
ColumnDef::new(Users::Avatar)
|
||||
@@ -496,13 +459,13 @@ async fn migrate_to_v3(transaction: DatabaseTransaction) -> Result<DatabaseTrans
|
||||
[],
|
||||
)
|
||||
.await?;
|
||||
Ok(transaction)
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn migrate_to_v4(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
async fn migrate_to_v4(pool: &DbConnection) -> anyhow::Result<()> {
|
||||
let builder = pool.get_database_backend();
|
||||
// Make emails and UUIDs unique.
|
||||
if let Err(e) = transaction
|
||||
if let Err(e) = pool
|
||||
.execute(
|
||||
builder.build(
|
||||
Index::create()
|
||||
@@ -514,16 +477,16 @@ async fn migrate_to_v4(transaction: DatabaseTransaction) -> Result<DatabaseTrans
|
||||
),
|
||||
)
|
||||
.await
|
||||
{
|
||||
error!(
|
||||
r#"Found several users with the same email.
|
||||
.context(
|
||||
r#"while enforcing unicity on emails (2 users have the same email).
|
||||
|
||||
See https://github.com/lldap/lldap/blob/main/docs/migration_guides/v0.5.md for details.
|
||||
|
||||
Conflicting emails:
|
||||
"#,
|
||||
);
|
||||
for (email, users) in &transaction
|
||||
)
|
||||
{
|
||||
warn!("Found several users with the same email:");
|
||||
for (email, users) in &pool
|
||||
.query_all(
|
||||
builder.build(
|
||||
Query::select()
|
||||
@@ -565,478 +528,39 @@ Conflicting emails:
|
||||
}
|
||||
return Err(e);
|
||||
}
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Index::create()
|
||||
.if_not_exists()
|
||||
.name("unique-user-uuid")
|
||||
.table(Users::Table)
|
||||
.col(Users::Uuid)
|
||||
.unique(),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Index::create()
|
||||
.if_not_exists()
|
||||
.name("unique-group-uuid")
|
||||
.table(Groups::Table)
|
||||
.col(Groups::Uuid)
|
||||
.unique(),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
async fn migrate_to_v5(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::create()
|
||||
.table(UserAttributeSchema::Table)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaName)
|
||||
.string_len(64)
|
||||
.not_null()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaType)
|
||||
.string_len(64)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaIsList)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaIsUserVisible)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaIsUserEditable)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributeSchema::UserAttributeSchemaIsHardcoded)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::create()
|
||||
.table(GroupAttributeSchema::Table)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaName)
|
||||
.string_len(64)
|
||||
.not_null()
|
||||
.primary_key(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaType)
|
||||
.string_len(64)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaIsList)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaIsGroupVisible)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaIsGroupEditable)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributeSchema::GroupAttributeSchemaIsHardcoded)
|
||||
.boolean()
|
||||
.not_null(),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::create()
|
||||
.table(UserAttributes::Table)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributes::UserAttributeUserId)
|
||||
.string_len(255)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributes::UserAttributeName)
|
||||
.string_len(64)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(UserAttributes::UserAttributeValue)
|
||||
.blob(sea_query::BlobSize::Long)
|
||||
.not_null(),
|
||||
)
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("UserAttributeUserIdForeignKey")
|
||||
.from(UserAttributes::Table, UserAttributes::UserAttributeUserId)
|
||||
.to(Users::Table, Users::UserId)
|
||||
.on_delete(ForeignKeyAction::Cascade)
|
||||
.on_update(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("UserAttributeNameForeignKey")
|
||||
.from(UserAttributes::Table, UserAttributes::UserAttributeName)
|
||||
.to(
|
||||
UserAttributeSchema::Table,
|
||||
UserAttributeSchema::UserAttributeSchemaName,
|
||||
)
|
||||
.on_delete(ForeignKeyAction::Cascade)
|
||||
.on_update(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.primary_key(
|
||||
Index::create()
|
||||
.col(UserAttributes::UserAttributeUserId)
|
||||
.col(UserAttributes::UserAttributeName),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::create()
|
||||
.table(GroupAttributes::Table)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributes::GroupAttributeGroupId)
|
||||
.integer()
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributes::GroupAttributeName)
|
||||
.string_len(64)
|
||||
.not_null(),
|
||||
)
|
||||
.col(
|
||||
ColumnDef::new(GroupAttributes::GroupAttributeValue)
|
||||
.blob(sea_query::BlobSize::Long)
|
||||
.not_null(),
|
||||
)
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("GroupAttributeGroupIdForeignKey")
|
||||
.from(
|
||||
GroupAttributes::Table,
|
||||
GroupAttributes::GroupAttributeGroupId,
|
||||
)
|
||||
.to(Groups::Table, Groups::GroupId)
|
||||
.on_delete(ForeignKeyAction::Cascade)
|
||||
.on_update(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.foreign_key(
|
||||
ForeignKey::create()
|
||||
.name("GroupAttributeNameForeignKey")
|
||||
.from(GroupAttributes::Table, GroupAttributes::GroupAttributeName)
|
||||
.to(
|
||||
GroupAttributeSchema::Table,
|
||||
GroupAttributeSchema::GroupAttributeSchemaName,
|
||||
)
|
||||
.on_delete(ForeignKeyAction::Cascade)
|
||||
.on_update(ForeignKeyAction::Cascade),
|
||||
)
|
||||
.primary_key(
|
||||
Index::create()
|
||||
.col(GroupAttributes::GroupAttributeGroupId)
|
||||
.col(GroupAttributes::GroupAttributeName),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Query::insert()
|
||||
.into_table(UserAttributeSchema::Table)
|
||||
.columns([
|
||||
UserAttributeSchema::UserAttributeSchemaName,
|
||||
UserAttributeSchema::UserAttributeSchemaType,
|
||||
UserAttributeSchema::UserAttributeSchemaIsList,
|
||||
UserAttributeSchema::UserAttributeSchemaIsUserVisible,
|
||||
UserAttributeSchema::UserAttributeSchemaIsUserEditable,
|
||||
UserAttributeSchema::UserAttributeSchemaIsHardcoded,
|
||||
])
|
||||
.values_panic([
|
||||
"first_name".into(),
|
||||
AttributeType::String.into(),
|
||||
false.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
])
|
||||
.values_panic([
|
||||
"last_name".into(),
|
||||
AttributeType::String.into(),
|
||||
false.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
])
|
||||
.values_panic([
|
||||
"avatar".into(),
|
||||
AttributeType::JpegPhoto.into(),
|
||||
false.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
true.into(),
|
||||
]),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
{
|
||||
let mut user_statement = Query::insert()
|
||||
.into_table(UserAttributes::Table)
|
||||
.columns([
|
||||
UserAttributes::UserAttributeUserId,
|
||||
UserAttributes::UserAttributeName,
|
||||
UserAttributes::UserAttributeValue,
|
||||
])
|
||||
.to_owned();
|
||||
#[derive(FromQueryResult)]
|
||||
struct FullUserDetails {
|
||||
user_id: UserId,
|
||||
first_name: Option<String>,
|
||||
last_name: Option<String>,
|
||||
avatar: Option<JpegPhoto>,
|
||||
}
|
||||
let mut any_user = false;
|
||||
for user in FullUserDetails::find_by_statement(builder.build(
|
||||
Query::select().from(Users::Table).columns([
|
||||
Users::UserId,
|
||||
Users::FirstName,
|
||||
Users::LastName,
|
||||
Users::Avatar,
|
||||
]),
|
||||
))
|
||||
.all(&transaction)
|
||||
.await?
|
||||
{
|
||||
if let Some(name) = &user.first_name {
|
||||
any_user = true;
|
||||
user_statement.values_panic([
|
||||
user.user_id.clone().into(),
|
||||
"first_name".into(),
|
||||
Serialized::from(name).into(),
|
||||
]);
|
||||
}
|
||||
if let Some(name) = &user.last_name {
|
||||
any_user = true;
|
||||
user_statement.values_panic([
|
||||
user.user_id.clone().into(),
|
||||
"last_name".into(),
|
||||
Serialized::from(name).into(),
|
||||
]);
|
||||
}
|
||||
if let Some(avatar) = &user.avatar {
|
||||
any_user = true;
|
||||
user_statement.values_panic([
|
||||
user.user_id.clone().into(),
|
||||
"avatar".into(),
|
||||
Serialized::from(avatar).into(),
|
||||
]);
|
||||
}
|
||||
}
|
||||
|
||||
if any_user {
|
||||
transaction.execute(builder.build(&user_statement)).await?;
|
||||
}
|
||||
}
|
||||
|
||||
for column in [Users::FirstName, Users::LastName, Users::Avatar] {
|
||||
transaction
|
||||
.execute(builder.build(Table::alter().table(Users::Table).drop_column(column)))
|
||||
.await?;
|
||||
}
|
||||
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
async fn migrate_to_v6(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter().table(Groups::Table).add_column(
|
||||
ColumnDef::new(Groups::LowercaseDisplayName)
|
||||
.string_len(255)
|
||||
.not_null()
|
||||
.default("UNSET"),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter().table(Users::Table).add_column(
|
||||
ColumnDef::new(Users::LowercaseEmail)
|
||||
.string_len(255)
|
||||
.not_null()
|
||||
.default("UNSET"),
|
||||
),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
transaction
|
||||
.execute(builder.build(Query::update().table(Groups::Table).value(
|
||||
Groups::LowercaseDisplayName,
|
||||
Func::lower(Expr::col(Groups::DisplayName)),
|
||||
)))
|
||||
.await?;
|
||||
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Query::update()
|
||||
.table(Users::Table)
|
||||
.value(Users::LowercaseEmail, Func::lower(Expr::col(Users::Email))),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
async fn migrate_to_v7(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter()
|
||||
.table(Metadata::Table)
|
||||
.add_column(ColumnDef::new(Metadata::PrivateKeyHash).blob(Blob(Some(32)))),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Table::alter()
|
||||
.table(Metadata::Table)
|
||||
.add_column(ColumnDef::new(Metadata::PrivateKeyLocation).string_len(255)),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
Ok(transaction)
|
||||
}
|
||||
|
||||
async fn migrate_to_v8(transaction: DatabaseTransaction) -> Result<DatabaseTransaction, DbErr> {
|
||||
let builder = transaction.get_database_backend();
|
||||
// Remove duplicate memberships.
|
||||
#[derive(FromQueryResult)]
|
||||
#[allow(dead_code)]
|
||||
struct MembershipInfo {
|
||||
user_id: UserId,
|
||||
group_id: GroupId,
|
||||
cnt: i64,
|
||||
}
|
||||
let mut delete_queries = MembershipInfo::find_by_statement(
|
||||
pool.execute(
|
||||
builder.build(
|
||||
Query::select()
|
||||
.from(Memberships::Table)
|
||||
.columns([Memberships::UserId, Memberships::GroupId])
|
||||
.expr_as(
|
||||
Expr::count(Expr::col((Memberships::Table, Memberships::UserId))),
|
||||
Alias::new("cnt"),
|
||||
)
|
||||
.group_by_columns([Memberships::UserId, Memberships::GroupId])
|
||||
.cond_having(all![SimpleExpr::Binary(
|
||||
Box::new(Expr::col((Memberships::Table, Memberships::UserId)).count()),
|
||||
BinOper::GreaterThan,
|
||||
Box::new(SimpleExpr::Value(1.into()))
|
||||
)]),
|
||||
Index::create()
|
||||
.if_not_exists()
|
||||
.name("unique-user-uuid")
|
||||
.table(Users::Table)
|
||||
.col(Users::Uuid)
|
||||
.unique(),
|
||||
),
|
||||
)
|
||||
.all(&transaction)
|
||||
.await?
|
||||
.into_iter()
|
||||
.map(
|
||||
|MembershipInfo {
|
||||
user_id,
|
||||
group_id,
|
||||
cnt,
|
||||
}| {
|
||||
builder
|
||||
.build(
|
||||
Query::delete()
|
||||
.from_table(Memberships::Table)
|
||||
.cond_where(all![
|
||||
Expr::col(Memberships::UserId).eq(user_id),
|
||||
Expr::col(Memberships::GroupId).eq(group_id)
|
||||
])
|
||||
.limit(cnt as u64 - 1),
|
||||
)
|
||||
.to_owned()
|
||||
},
|
||||
.await
|
||||
.context("while enforcing unicity on user UUIDs (2 users have the same UUID)")?;
|
||||
pool.execute(
|
||||
builder.build(
|
||||
Index::create()
|
||||
.if_not_exists()
|
||||
.name("unique-group-uuid")
|
||||
.table(Groups::Table)
|
||||
.col(Groups::Uuid)
|
||||
.unique(),
|
||||
),
|
||||
)
|
||||
.peekable();
|
||||
if delete_queries.peek().is_some() {
|
||||
match transaction.get_database_backend() {
|
||||
sea_orm::DatabaseBackend::Sqlite => {
|
||||
return Err(DbErr::Migration(format!(
|
||||
"The Sqlite driver does not support LIMIT in DELETE. Run these queries manually:\n{}" , delete_queries.map(|s| s.to_string()).join("\n"))));
|
||||
}
|
||||
sea_orm::DatabaseBackend::MySql | sea_orm::DatabaseBackend::Postgres => {
|
||||
for query in delete_queries {
|
||||
transaction.execute(query).await?;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Index::create()
|
||||
.if_not_exists()
|
||||
.name("unique-memberships")
|
||||
.table(Memberships::Table)
|
||||
.col(Memberships::UserId)
|
||||
.col(Memberships::GroupId)
|
||||
.unique(),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
Ok(transaction)
|
||||
.await
|
||||
.context("while enforcing unicity on group UUIDs (2 groups have the same UUID)")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
// This is needed to make an array of async functions.
|
||||
macro_rules! to_sync {
|
||||
($l:ident) => {
|
||||
move |transaction| -> std::pin::Pin<
|
||||
Box<dyn std::future::Future<Output = Result<DatabaseTransaction, DbErr>>>,
|
||||
> { Box::pin($l(transaction)) }
|
||||
|pool| -> std::pin::Pin<Box<dyn std::future::Future<Output = anyhow::Result<()>>>> {
|
||||
Box::pin($l(pool))
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1055,29 +579,21 @@ pub async fn migrate_from_version(
|
||||
to_sync!(migrate_to_v2),
|
||||
to_sync!(migrate_to_v3),
|
||||
to_sync!(migrate_to_v4),
|
||||
to_sync!(migrate_to_v5),
|
||||
to_sync!(migrate_to_v6),
|
||||
to_sync!(migrate_to_v7),
|
||||
to_sync!(migrate_to_v8),
|
||||
];
|
||||
assert_eq!(migrations.len(), (LAST_SCHEMA_VERSION.0 - 1) as usize);
|
||||
for migration in 2..=last_version.0 {
|
||||
for migration in 2..=4 {
|
||||
if version < SchemaVersion(migration) && SchemaVersion(migration) <= last_version {
|
||||
info!("Upgrading DB schema to version {}", migration);
|
||||
let transaction = pool.begin().await?;
|
||||
let transaction = migrations[(migration - 2) as usize](transaction).await?;
|
||||
let builder = transaction.get_database_backend();
|
||||
transaction
|
||||
.execute(
|
||||
builder.build(
|
||||
Query::update()
|
||||
.table(Metadata::Table)
|
||||
.value(Metadata::Version, Value::from(migration)),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
transaction.commit().await?;
|
||||
migrations[(migration - 2) as usize](pool).await?;
|
||||
}
|
||||
}
|
||||
let builder = pool.get_database_backend();
|
||||
pool.execute(
|
||||
builder.build(
|
||||
Query::update()
|
||||
.table(Metadata::Table)
|
||||
.value(Metadata::Version, Value::from(last_version)),
|
||||
),
|
||||
)
|
||||
.await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -15,7 +15,7 @@ use tracing::{debug, instrument};
|
||||
|
||||
type SqlOpaqueHandler = SqlBackendHandler;
|
||||
|
||||
#[instrument(skip_all, level = "debug", err, fields(username = %username.as_str()))]
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
fn passwords_match(
|
||||
password_file_bytes: &[u8],
|
||||
clear_password: &str,
|
||||
@@ -33,7 +33,7 @@ fn passwords_match(
|
||||
server_setup,
|
||||
Some(password_file),
|
||||
client_login_start_result.message,
|
||||
username,
|
||||
username.as_str(),
|
||||
)?;
|
||||
client::login::finish_login(
|
||||
client_login_start_result.state,
|
||||
@@ -49,7 +49,7 @@ impl SqlBackendHandler {
|
||||
)?)
|
||||
}
|
||||
|
||||
#[instrument(skip(self), level = "debug", err)]
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
async fn get_password_file_for_user(&self, user_id: UserId) -> Result<Option<Vec<u8>>> {
|
||||
// Fetch the previously registered password file from the DB.
|
||||
Ok(model::User::find_by_id(user_id)
|
||||
@@ -100,13 +100,15 @@ impl OpaqueHandler for SqlOpaqueHandler {
|
||||
&self,
|
||||
request: login::ClientLoginStartRequest,
|
||||
) -> Result<login::ServerLoginStartResponse> {
|
||||
let user_id = request.username;
|
||||
let maybe_password_file = self
|
||||
.get_password_file_for_user(user_id.clone())
|
||||
.get_password_file_for_user(UserId::new(&request.username))
|
||||
.await?
|
||||
.map(|bytes| {
|
||||
opaque::server::ServerRegistration::deserialize(&bytes).map_err(|_| {
|
||||
DomainError::InternalError(format!("Corrupted password file for {}", &user_id))
|
||||
DomainError::InternalError(format!(
|
||||
"Corrupted password file for {}",
|
||||
&request.username
|
||||
))
|
||||
})
|
||||
})
|
||||
.transpose()?;
|
||||
@@ -118,11 +120,11 @@ impl OpaqueHandler for SqlOpaqueHandler {
|
||||
self.config.get_server_setup(),
|
||||
maybe_password_file,
|
||||
request.login_start_request,
|
||||
&user_id,
|
||||
&request.username,
|
||||
)?;
|
||||
let secret_key = self.get_orion_secret_key()?;
|
||||
let server_data = login::ServerData {
|
||||
username: user_id,
|
||||
username: request.username,
|
||||
server_login: start_response.state,
|
||||
};
|
||||
let encrypted_state = orion::aead::seal(&secret_key, &bincode::serialize(&server_data)?)?;
|
||||
@@ -149,7 +151,7 @@ impl OpaqueHandler for SqlOpaqueHandler {
|
||||
opaque::server::login::finish_login(server_login, request.credential_finalization)?
|
||||
.session_key;
|
||||
|
||||
Ok(username)
|
||||
Ok(UserId::new(&username))
|
||||
}
|
||||
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
@@ -189,7 +191,7 @@ impl OpaqueHandler for SqlOpaqueHandler {
|
||||
opaque::server::registration::get_password_file(request.registration_upload);
|
||||
// Set the user password to the new password.
|
||||
let user_update = model::users::ActiveModel {
|
||||
user_id: ActiveValue::Set(username),
|
||||
user_id: ActiveValue::Set(UserId::new(&username)),
|
||||
password_hash: ActiveValue::Set(Some(password_file.serialize())),
|
||||
..Default::default()
|
||||
};
|
||||
@@ -199,19 +201,19 @@ impl OpaqueHandler for SqlOpaqueHandler {
|
||||
}
|
||||
|
||||
/// Convenience function to set a user's password.
|
||||
#[instrument(skip_all, level = "debug", err, fields(username = %username.as_str()))]
|
||||
#[instrument(skip_all, level = "debug", err)]
|
||||
pub(crate) async fn register_password(
|
||||
opaque_handler: &SqlOpaqueHandler,
|
||||
username: UserId,
|
||||
username: &UserId,
|
||||
password: &SecUtf8,
|
||||
) -> Result<()> {
|
||||
let mut rng = rand::rngs::OsRng;
|
||||
use registration::*;
|
||||
let registration_start =
|
||||
opaque::client::registration::start_registration(password.unsecure().as_bytes(), &mut rng)?;
|
||||
opaque::client::registration::start_registration(password.unsecure(), &mut rng)?;
|
||||
let start_response = opaque_handler
|
||||
.registration_start(ClientRegistrationStartRequest {
|
||||
username,
|
||||
username: username.to_string(),
|
||||
registration_start_request: registration_start.message,
|
||||
})
|
||||
.await?;
|
||||
@@ -243,7 +245,7 @@ mod tests {
|
||||
let login_start = opaque::client::login::start_login(password, &mut rng)?;
|
||||
let start_response = opaque_handler
|
||||
.login_start(ClientLoginStartRequest {
|
||||
username: UserId::new(username),
|
||||
username: username.to_string(),
|
||||
login_start_request: login_start.message,
|
||||
})
|
||||
.await?;
|
||||
@@ -274,7 +276,7 @@ mod tests {
|
||||
.unwrap_err();
|
||||
register_password(
|
||||
&opaque_handler,
|
||||
UserId::new("bob"),
|
||||
&UserId::new("bob"),
|
||||
&secstr::SecUtf8::from("bob00"),
|
||||
)
|
||||
.await?;
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user