From 14531fa258ea698cefa46b0fb4ce2e1993ddca7d Mon Sep 17 00:00:00 2001
From: Austin Alvarado
Date: Wed, 4 Jan 2023 00:24:40 -0700
Subject: [PATCH 01/62] docker: upgrade alpine in base dockerfile
This allows us to upgrade rustc to past 1.65, which is required by sea-orm.
---
Dockerfile | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 6eda8dc..8eac2d9 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
# Build image
-FROM rust:alpine3.14 AS chef
+FROM rust:alpine3.16 AS chef
RUN set -x \
# Add user
@@ -41,7 +41,7 @@ RUN cargo build --release -p lldap -p migration-tool \
&& ./app/build.sh
# Final image
-FROM alpine:3.14
+FROM alpine:3.16
ENV GOSU_VERSION 1.14
# Fetch gosu from git
From d7cc10fa006765330ab2c06fd7998cd766394f21 Mon Sep 17 00:00:00 2001
From: Dedy Martadinata S
Date: Thu, 5 Jan 2023 21:36:01 +0700
Subject: [PATCH 02/62] ci: fetch missing web components
---
.github/workflows/docker-build-static.yml | 11 +++++++++--
1 file changed, 9 insertions(+), 2 deletions(-)
diff --git a/.github/workflows/docker-build-static.yml b/.github/workflows/docker-build-static.yml
index 56d417c..fa92ff3 100644
--- a/.github/workflows/docker-build-static.yml
+++ b/.github/workflows/docker-build-static.yml
@@ -465,6 +465,13 @@ jobs:
path: web
- name: Web Cleanup
run: mkdir app && mv web/index.html app/index.html && mv web/static app/static && mv web/pkg app/pkg
+ - name: Fetch web components
+ run: |
+ sudo apt update
+ sudo apt install wget
+ for file in $(cat app/static/libraries.txt); do wget -P app/static "$file"; done
+ for file in $(cat app/static/fonts/fonts.txt); do wget -P app/static/fonts "$file"; done
+ chmod a+r -R .
- name: compress web
run: sudo apt update && sudo apt install -y zip && zip -r web.zip app/
@@ -474,12 +481,12 @@ jobs:
id: create_release
with:
allowUpdates: true
- artifacts: "bin/armhf-bin/lldap-armhf,
+ artifacts: bin/armhf-bin/lldap-armhf,
bin/aarch64-bin/lldap-aarch64,
bin/amd64-bin/lldap-amd64,
bin/armhf-bin/migration-tool-armhf,
bin/aarch64-bin/migration-tool-aarch64,
bin/amd64-bin/migration-tool-amd64,
- web.zip"
+ web.zip
env:
GITHUB_TOKEN: ${{ github.token }}
From c87adfeeccc703826d641dfb8ffc459026829608 Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Fri, 6 Jan 2023 13:13:54 +0100
Subject: [PATCH 03/62] build(deps): bump actions/checkout from 3.2.0 to 3.3.0
(#410)
Bumps [actions/checkout](https://github.com/actions/checkout) from 3.2.0 to 3.3.0.
- [Release notes](https://github.com/actions/checkout/releases)
- [Changelog](https://github.com/actions/checkout/blob/main/CHANGELOG.md)
- [Commits](https://github.com/actions/checkout/compare/v3.2.0...v3.3.0)
---
updated-dependencies:
- dependency-name: actions/checkout
dependency-type: direct:production
update-type: version-update:semver-minor
...
Signed-off-by: dependabot[bot]
Signed-off-by: dependabot[bot]
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
---
.github/workflows/docker-build-static.yml | 12 ++++++------
.github/workflows/rust.yml | 8 ++++----
2 files changed, 10 insertions(+), 10 deletions(-)
diff --git a/.github/workflows/docker-build-static.yml b/.github/workflows/docker-build-static.yml
index fa92ff3..f7eac75 100644
--- a/.github/workflows/docker-build-static.yml
+++ b/.github/workflows/docker-build-static.yml
@@ -80,7 +80,7 @@ jobs:
restore-keys: |
lldap-ui-
- name: Checkout repository
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3.3.0
- name: install rollup nodejs
run: npm install -g rollup
- name: install wasm-pack with cargo
@@ -119,7 +119,7 @@ jobs:
- name: smoke test
run: rustc --version
- name: Checkout repository
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3.3.0
- uses: actions/cache@v3
with:
path: |
@@ -164,11 +164,11 @@ jobs:
CARGO_HOME: ${GITHUB_WORKSPACE}/.cargo
steps:
- name: Checkout repository
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3.3.0
- name: smoke test
run: rustc --version
- name: Checkout repository
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3.3.0
- uses: actions/cache@v3
with:
path: |
@@ -214,7 +214,7 @@ jobs:
CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER: x86_64-linux-musl-gcc
steps:
- name: Checkout repository
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3.3.0
- uses: actions/cache@v3
with:
path: |
@@ -262,7 +262,7 @@ jobs:
- name: install rsync
run: sudo apt update && sudo apt install -y rsync
- name: fetch repo
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3.3.0
- name: Download armhf lldap artifacts
uses: actions/download-artifact@v3
diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 0825b37..a5952d1 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -34,7 +34,7 @@ jobs:
steps:
- name: Checkout sources
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3.3.0
- uses: Swatinem/rust-cache@v2
- name: Build
run: cargo build --verbose --workspace
@@ -53,7 +53,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3.3.0
- uses: Swatinem/rust-cache@v2
@@ -70,7 +70,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3.3.0
- uses: Swatinem/rust-cache@v2
@@ -87,7 +87,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Checkout sources
- uses: actions/checkout@v3.2.0
+ uses: actions/checkout@v3.3.0
- name: Install Rust
run: rustup toolchain install nightly --component llvm-tools-preview && rustup component add llvm-tools-preview --toolchain stable-x86_64-unknown-linux-gnu
From 3a43b7a4c2156b603c9d98508828e048d6ed5009 Mon Sep 17 00:00:00 2001
From: Dedy Martadinata S
Date: Fri, 6 Jan 2023 22:34:22 +0700
Subject: [PATCH 04/62] docker: simplify ci and better package release
artifacts
---
.github/workflows/Dockerfile.ci.alpine | 12 +-
.github/workflows/Dockerfile.ci.debian | 12 +-
.github/workflows/docker-build-static.yml | 193 ++++++++--------------
3 files changed, 78 insertions(+), 139 deletions(-)
diff --git a/.github/workflows/Dockerfile.ci.alpine b/.github/workflows/Dockerfile.ci.alpine
index 0d074c3..47f778f 100644
--- a/.github/workflows/Dockerfile.ci.alpine
+++ b/.github/workflows/Dockerfile.ci.alpine
@@ -10,8 +10,8 @@ RUN mkdir -p target/
RUN mkdir -p /lldap/app
RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
- mv bin/amd64-bin/lldap target/lldap && \
- mv bin/amd64-bin/migration-tool target/migration-tool && \
+ mv bin/amd64-lldap-bin/lldap target/lldap && \
+ mv bin/amd64-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
@@ -19,8 +19,8 @@ RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
; fi
RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
- mv bin/aarch64-bin/lldap target/lldap && \
- mv bin/aarch64-bin/migration-tool target/migration-tool && \
+ mv bin/aarch64-lldap-bin/lldap target/lldap && \
+ mv bin/aarch64-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
@@ -28,8 +28,8 @@ RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
; fi
RUN if [ "${TARGETPLATFORM}" = "linux/arm/v7" ]; then \
- mv bin/armhf-bin/lldap target/lldap && \
- mv bin/armhf-bin/migration-tool target/migration-tool && \
+ mv bin/armhf-lldap-bin/lldap target/lldap && \
+ mv bin/armhf-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
diff --git a/.github/workflows/Dockerfile.ci.debian b/.github/workflows/Dockerfile.ci.debian
index 03cdbfc..3f7c45b 100644
--- a/.github/workflows/Dockerfile.ci.debian
+++ b/.github/workflows/Dockerfile.ci.debian
@@ -10,8 +10,8 @@ RUN mkdir -p target/
RUN mkdir -p /lldap/app
RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
- mv bin/amd64-bin/lldap target/lldap && \
- mv bin/amd64-bin/migration-tool target/migration-tool && \
+ mv bin/amd64-lldap-bin/lldap target/lldap && \
+ mv bin/amd64-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
@@ -19,8 +19,8 @@ RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
; fi
RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
- mv bin/aarch64-bin/lldap target/lldap && \
- mv bin/aarch64-bin/migration-tool target/migration-tool && \
+ mv bin/aarch64-lldap-bin/lldap target/lldap && \
+ mv bin/aarch64-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
@@ -28,8 +28,8 @@ RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
; fi
RUN if [ "${TARGETPLATFORM}" = "linux/arm/v7" ]; then \
- mv bin/armhf-bin/lldap target/lldap && \
- mv bin/armhf-bin/migration-tool target/migration-tool && \
+ mv bin/armhf-lldap-bin/lldap target/lldap && \
+ mv bin/armhf-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
diff --git a/.github/workflows/docker-build-static.yml b/.github/workflows/docker-build-static.yml
index f7eac75..cc8146c 100644
--- a/.github/workflows/docker-build-static.yml
+++ b/.github/workflows/docker-build-static.yml
@@ -22,16 +22,19 @@ env:
# In total 5 jobs, all the jobs are containerized
# ---
+#######################################################################################
+# GitHub actions randomly timeout when downloading musl-gcc #
+# Using lldap dev image based on https://hub.docker.com/_/rust and musl-gcc bundled #
+# Look into .github/workflows/Dockerfile.dev for development image details #
+#######################################################################################
+
# build-ui , create/compile the web
-## Use rustlang/rust:nighlty image
-### Install nodejs from nodesource repo
### install wasm
### install rollup
### run app/build.sh
### upload artifacts
# builds-armhf, build-aarch64, build-amd64 create binary for respective arch
-## Use rustlang/rust:nightly image
### Add non-native architecture dpkg --add-architecture XXX
### Install dev tool gcc g++, etc. per respective arch
### Cargo build
@@ -44,30 +47,16 @@ env:
# build-ui,builds-armhf, build-aarch64, build-amd64 will upload artifacts will be used next job
# build-docker-image job will fetch artifacts and run Dockerfile.ci then push the image.
-# On current https://hub.docker.com/_/rust
-# 1-bullseye, 1.61-bullseye, 1.61.0-bullseye, bullseye, 1, 1.61, 1.61.0, latest
-
-# cache
-## cargo
-## target
+# cache based on Cargo.lock
jobs:
build-ui:
runs-on: ubuntu-latest
container:
- image: rust:1.65
- env:
- CARGO_TERM_COLOR: always
- RUSTFLAGS: -Ctarget-feature=+crt-static
+ image: nitnelave/rust-dev:latest
steps:
- - name: install runtime
- run: apt update && apt install -y gcc-x86-64-linux-gnu g++-x86-64-linux-gnu libc6-dev ca-certificates
- - name: setup node repo LTS
- run: curl -fsSL https://deb.nodesource.com/setup_lts.x | bash -
- - name: install nodejs
- run: apt install -y nodejs && npm -g install npm
- - name: smoke test
- run: rustc --version
+ - name: Checkout repository
+ uses: actions/checkout@v3.3.0
- uses: actions/cache@v3
with:
path: |
@@ -79,10 +68,10 @@ jobs:
key: lldap-ui-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
lldap-ui-
- - name: Checkout repository
- uses: actions/checkout@v3.3.0
- name: install rollup nodejs
run: npm install -g rollup
+ - name: add wasm target
+ run: rustup target add wasm32-unknown-unknown
- name: install wasm-pack with cargo
run: cargo install wasm-pack || true
env:
@@ -100,7 +89,7 @@ jobs:
build-armhf:
runs-on: ubuntu-latest
container:
- image: rust:1.65
+ image: nitnelave/rust-dev:latest
env:
CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER: arm-linux-gnueabihf-ld
@@ -112,12 +101,8 @@ jobs:
run: dpkg --add-architecture armhf
- name: install runtime
run: apt update && apt install -y gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf libc6-armhf-cross libc6-dev-armhf-cross tar ca-certificates
- - name: smoke test
- run: rustc --version
- name: add armhf target
run: rustup target add armv7-unknown-linux-gnueabihf
- - name: smoke test
- run: rustc --version
- name: Checkout repository
uses: actions/checkout@v3.3.0
- uses: actions/cache@v3
@@ -150,12 +135,6 @@ jobs:
build-aarch64:
runs-on: ubuntu-latest
container:
-##################################################################################
-# GitHub actions currently timeout when downloading musl-gcc #
-# Using lldap dev image based on rust:1.65-slim-bullseye and musl-gcc bundled #
-# Only for Job build aarch64 and amd64 #
-###################################################################################
- #image: rust:1.65
image: nitnelave/rust-dev:latest
env:
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER: aarch64-linux-musl-gcc
@@ -163,10 +142,6 @@ jobs:
RUSTFLAGS: -Ctarget-feature=+crt-static
CARGO_HOME: ${GITHUB_WORKSPACE}/.cargo
steps:
- - name: Checkout repository
- uses: actions/checkout@v3.3.0
- - name: smoke test
- run: rustc --version
- name: Checkout repository
uses: actions/checkout@v3.3.0
- uses: actions/cache@v3
@@ -205,7 +180,6 @@ jobs:
build-amd64:
runs-on: ubuntu-latest
container:
-# image: rust:1.65
image: nitnelave/rust-dev:latest
env:
CARGO_TERM_COLOR: always
@@ -263,42 +237,10 @@ jobs:
run: sudo apt update && sudo apt install -y rsync
- name: fetch repo
uses: actions/checkout@v3.3.0
-
- - name: Download armhf lldap artifacts
+ - name: Download All Artifacts
uses: actions/download-artifact@v3
with:
- name: armhf-lldap-bin
- path: bin/armhf-bin
- - name: Download armhf migration-tool artifacts
- uses: actions/download-artifact@v3
- with:
- name: armhf-migration-tool-bin
- path: bin/armhf-bin
-
- - name: Download aarch64 lldap artifacts
- uses: actions/download-artifact@v3
- with:
- name: aarch64-lldap-bin
- path: bin/aarch64-bin
- - name: Download aarch64 migration-tool artifacts
- uses: actions/download-artifact@v3
- with:
- name: aarch64-migration-tool-bin
- path: bin/aarch64-bin
-
- - name: Download amd64 lldap artifacts
- uses: actions/download-artifact@v3
- with:
- name: amd64-lldap-bin
- path: bin/amd64-bin
- - name: Download amd64 migration-tool artifacts
- uses: actions/download-artifact@v3
- with:
- name: amd64-migration-tool-bin
- path: bin/amd64-bin
-
- - name: check bin path
- run: ls -al bin/
+ path: bin
- name: Download llap ui artifacts
uses: actions/download-artifact@v3
@@ -326,7 +268,7 @@ jobs:
type=semver,pattern={{major}}
type=sha
- name: Cache Docker layers
- uses: actions/cache@v2
+ uses: actions/cache@v3
with:
path: /tmp/.buildx-cache
key: ${{ runner.os }}-buildx-${{ github.sha }}
@@ -344,9 +286,9 @@ jobs:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
-######################
-#### latest build ####
-######################
+########################################
+#### docker image :latest tag build ####
+########################################
- name: Build and push latest alpine
if: github.event_name != 'release'
uses: docker/build-push-action@v3
@@ -371,9 +313,9 @@ jobs:
cache-from: type=local,src=/tmp/.buildx-cache
cache-to: type=local,dest=/tmp/.buildx-cache-new
-#######################
-#### release build ####
-#######################
+########################################
+#### docker image :semver tag build ####
+########################################
- name: Build and push release alpine
if: github.event_name == 'release'
uses: docker/build-push-action@v3
@@ -411,52 +353,31 @@ jobs:
password: ${{ secrets.DOCKERHUB_PASSWORD }}
repository: nitnelave/lldap
-
+###############################################################
+### Download artifacts, clean up ui, upload to release page ###
+###############################################################
create-release-artifacts:
needs: [build-ui,build-armhf,build-aarch64,build-amd64]
name: Create release artifacts
if: github.event_name == 'release'
runs-on: ubuntu-latest
steps:
-
- - name: Download armhf lldap artifacts
+ - name: Download All Artifacts
uses: actions/download-artifact@v3
with:
- name: armhf-lldap-bin
- path: bin/armhf-bin
- - name: Download armhf migration-tool artifacts
- uses: actions/download-artifact@v3
- with:
- name: armhf-migration-tool-bin
- path: bin/armhf-bin
- - name: Fix binary name armhf
- run: mv bin/armhf-bin/lldap bin/armhf-bin/lldap-armhf && mv bin/armhf-bin/migration-tool bin/armhf-bin/migration-tool-armhf
-
- - name: Download aarch64 lldap artifacts
- uses: actions/download-artifact@v3
- with:
- name: aarch64-lldap-bin
- path: bin/aarch64-bin
- - name: Download aarch64 migration-tool artifacts
- uses: actions/download-artifact@v3
- with:
- name: aarch64-migration-tool-bin
- path: bin/aarch64-bin
- - name: Fix binary name aarch64
- run: mv bin/aarch64-bin/lldap bin/aarch64-bin/lldap-aarch64 && mv bin/aarch64-bin/migration-tool bin/aarch64-bin/migration-tool-aarch64
-
- - name: Download amd64 lldap artifacts
- uses: actions/download-artifact@v3
- with:
- name: amd64-lldap-bin
- path: bin/amd64-bin
- - name: Download amd64 migration-tool artifacts
- uses: actions/download-artifact@v3
- with:
- name: amd64-migration-tool-bin
- path: bin/amd64-bin
- - name: Fix binary name amd64
- run: mv bin/amd64-bin/lldap bin/amd64-bin/lldap-amd64 && mv bin/amd64-bin/migration-tool bin/amd64-bin/migration-tool-amd64
+ path: bin/
+ - name: Check file
+ run: ls -alR bin/
+ - name: Fixing Filename
+ run: |
+ mv bin/aarch64-lldap-bin/lldap bin/aarch64-lldap
+ mv bin/amd64-lldap-bin/lldap bin/amd64-lldap
+ mv bin/armhf-lldap-bin/lldap bin/armhf-lldap
+ mv bin/aarch64-migration-tool-bin/migration-tool bin/aarch64-migration-tool
+ mv bin/amd64-migration-tool-bin/migration-tool bin/amd64-migration-tool
+ mv bin/armhf-migration-tool-bin/migration-tool bin/armhf-migration-tool
+ chmod +x bin/*-lldap
+ chmod +x bin/*-migration-tool
- name: Download llap ui artifacts
uses: actions/download-artifact@v3
@@ -472,8 +393,30 @@ jobs:
for file in $(cat app/static/libraries.txt); do wget -P app/static "$file"; done
for file in $(cat app/static/fonts/fonts.txt); do wget -P app/static/fonts "$file"; done
chmod a+r -R .
- - name: compress web
- run: sudo apt update && sudo apt install -y zip && zip -r web.zip app/
+
+ - name: Setup LLDAP dir for packing
+ run: |
+ mkdir aarch64-lldap
+ mkdir amd64-lldap
+ mkdir armhf-lldap
+ mv bin/aarch64-lldap aarch64-lldap/lldap
+ mv bin/amd64-lldap amd64-lldap/lldap
+ mv bin/armhf-lldap armhf-lldap/lldap
+ mv bin/aarch64-migration-tool aarch64-lldap/migration-tool
+ mv bin/amd64-migration-tool amd64-lldap/migration-tool
+ mv bin/armhf-migration-tool armhf-lldap/migration-tool
+ cp -r app aarch64-lldap/
+ cp -r app amd64-lldap/
+ cp -r app armhf-lldap/
+ ls -alR aarch64-lldap/
+ ls -alR amd64-lldap/
+ ls -alR armhf-lldap/
+
+ - name: Compress
+ run: |
+ tar -czvf aarch64-lldap.tar.gz aarch64-lldap/
+ tar -czvf amd64-lldap.tar.gz amd64-lldap/
+ tar -czvf armhf-lldap.tar.gz armhf-lldap/
- name: Upload artifacts release
@@ -481,12 +424,8 @@ jobs:
id: create_release
with:
allowUpdates: true
- artifacts: bin/armhf-bin/lldap-armhf,
- bin/aarch64-bin/lldap-aarch64,
- bin/amd64-bin/lldap-amd64,
- bin/armhf-bin/migration-tool-armhf,
- bin/aarch64-bin/migration-tool-aarch64,
- bin/amd64-bin/migration-tool-amd64,
- web.zip
+ artifacts: aarch64-lldap.tar.gz,
+ amd64-lldap.tar.gz,
+ armhf-lldap.tar.gz
env:
GITHUB_TOKEN: ${{ github.token }}
From 260b545a54d6ecd41e2cdd5ae4fdd50c0cf407f8 Mon Sep 17 00:00:00 2001
From: poVoq
Date: Mon, 9 Jan 2023 15:53:44 -0100
Subject: [PATCH 05/62] example_configs,gitea: add additional attributes and
group sync
Not extensively tested, but group/team sync seems to work in Forgejo.
---
example_configs/gitea.md | 25 +++++++++++++++++++++++--
1 file changed, 23 insertions(+), 2 deletions(-)
diff --git a/example_configs/gitea.md b/example_configs/gitea.md
index b2c8a54..654d76e 100644
--- a/example_configs/gitea.md
+++ b/example_configs/gitea.md
@@ -1,4 +1,4 @@
-# Configuration for Gitea
+# Configuration for Gitea (& Forgejo)
In Gitea, go to `Site Administration > Authentication Sources` and click `Add Authentication Source`
Select `LDAP (via BindDN)`
@@ -14,9 +14,30 @@ To log in they can either use their email address or user name. If you only want
For more info on the user filter, see: https://docs.gitea.io/en-us/authentication/#ldap-via-binddn
* Admin Filter: Use `(memberof=cn=lldap_admin,ou=groups,dc=example,dc=com)` if you want lldap admins to become Gitea admins. Leave empty otherwise.
* Username Attribute: `uid`
+* First Name Attribute: `givenName`
+* Surname Attribute: `sn`
* Email Attribute: `mail`
+* Avatar Attribute: `jpegPhoto`
* Check `Enable User Synchronization`
Replace every instance of `dc=example,dc=com` with your configured domain.
-After applying the above settings, users should be able to log in with either their user name or email address.
\ No newline at end of file
+After applying the above settings, users should be able to log in with either their user name or email address.
+
+## Synchronizing LDAP groups with existing teams in organisations
+
+Groups in LLDAP can be synchronized with teams in organisations. Organisations and teams must be created manually in Gitea.
+It is possible to synchronize one LDAP group with multiple teams in a Gitea organization.
+
+Check `Enable LDAP Groups`
+
+* Group Search Base DN: `ou=groups,dc=example,dc=com`
+* Group Attribute Containing List Of Users: `member`
+* User Attribute Listed In Group: `dn`
+* Map LDAP groups to Organization teams: `{"cn=Groupname1,ou=groups,dc=example,dc=com":{"Organization1": ["Teamname"]},"cn=Groupname2,ou=groups,dc=example,dc=com": {"Organization2": ["Teamname1", "Teamname2"]}}`
+
+Check `Remove Users from synchronised teams...`
+
+The `Map LDAP groups to Organization teams` config is JSON formatted and can be extended to as many groups as needed.
+
+Replace every instance of `dc=example,dc=com` with your configured domain.
From 692bbb00f1ba9b43a10e5cb6a0fcf01c002969ef Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Fri, 13 Jan 2023 15:01:33 +0100
Subject: [PATCH 06/62] db: Change the version number from u8 to i16
This is the smallest integer compatible with all of MySQL, Postgres and
SQlite.
This is a backwards-compatible change for SQlite since both are
represented as "integer", and all u8 values can be represented as i16.
---
server/src/domain/sql_migrations.rs | 3 ++-
server/src/domain/sql_tables.rs | 4 ++--
2 files changed, 4 insertions(+), 3 deletions(-)
diff --git a/server/src/domain/sql_migrations.rs b/server/src/domain/sql_migrations.rs
index 62d9e59..ff5bb21 100644
--- a/server/src/domain/sql_migrations.rs
+++ b/server/src/domain/sql_migrations.rs
@@ -116,6 +116,7 @@ pub async fn upgrade_to_v1(pool: &DbConnection) -> std::result::Result<(), sea_o
.col(
ColumnDef::new(Groups::GroupId)
.integer()
+ .auto_increment()
.not_null()
.primary_key(),
)
@@ -309,7 +310,7 @@ pub async fn upgrade_to_v1(pool: &DbConnection) -> std::result::Result<(), sea_o
Table::create()
.table(Metadata::Table)
.if_not_exists()
- .col(ColumnDef::new(Metadata::Version).tiny_integer()),
+ .col(ColumnDef::new(Metadata::Version).small_integer()),
),
)
.await?;
diff --git a/server/src/domain/sql_tables.rs b/server/src/domain/sql_tables.rs
index af5615a..1bc4f77 100644
--- a/server/src/domain/sql_tables.rs
+++ b/server/src/domain/sql_tables.rs
@@ -4,7 +4,7 @@ use sea_orm::Value;
pub type DbConnection = sea_orm::DatabaseConnection;
#[derive(Copy, PartialEq, Eq, Debug, Clone)]
-pub struct SchemaVersion(pub u8);
+pub struct SchemaVersion(pub i16);
impl sea_orm::TryGetable for SchemaVersion {
fn try_get(
@@ -12,7 +12,7 @@ impl sea_orm::TryGetable for SchemaVersion {
pre: &str,
col: &str,
) -> Result {
- Ok(SchemaVersion(u8::try_get(res, pre, col)?))
+ Ok(SchemaVersion(i16::try_get(res, pre, col)?))
}
}
From e458aca3e39048e057763ed2fe32ed2fffb978ca Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Fri, 13 Jan 2023 15:09:25 +0100
Subject: [PATCH 07/62] db: Change the DB storage type to NaiveDateTime
The entire internals of the server now work using only NaiveDateTime,
since we know they are all UTC. At the fringes (LDAP, GraphQL, JWT
tokens) we convert back into UTC to make sure we have a clear API.
This allows us to be compatible with Postgres (which doesn't support
DateTime, only NaiveDateTime).
This change is backwards compatible since in SQlite with
Sea-query/Sea-ORM, the UTC datetimes are stored without a timezone, as
simple strings. It's the same format as NaiveDateTime.
Fixes #87.
---
server/src/domain/handler.rs | 10 ++++++--
server/src/domain/ldap/user.rs | 6 ++++-
server/src/domain/model/groups.rs | 2 +-
.../src/domain/model/jwt_refresh_storage.rs | 2 +-
server/src/domain/model/jwt_storage.rs | 2 +-
.../src/domain/model/password_reset_tokens.rs | 2 +-
server/src/domain/model/users.rs | 2 +-
.../src/domain/sql_group_backend_handler.rs | 2 +-
server/src/domain/sql_migrations.rs | 4 +--
server/src/domain/sql_tables.rs | 4 +--
server/src/domain/sql_user_backend_handler.rs | 2 +-
server/src/domain/types.rs | 23 ++++++++++-------
server/src/infra/graphql/query.rs | 11 ++++----
server/src/infra/ldap_handler.rs | 25 +++++++++++--------
server/src/infra/sql_backend_handler.rs | 4 +--
15 files changed, 60 insertions(+), 41 deletions(-)
diff --git a/server/src/domain/handler.rs b/server/src/domain/handler.rs
index ef43e93..a39c256 100644
--- a/server/src/domain/handler.rs
+++ b/server/src/domain/handler.rs
@@ -140,8 +140,14 @@ mod tests {
fn test_uuid_time() {
use chrono::prelude::*;
let user_id = "bob";
- let date1 = Utc.with_ymd_and_hms(2014, 7, 8, 9, 10, 11).unwrap();
- let date2 = Utc.with_ymd_and_hms(2014, 7, 8, 9, 10, 12).unwrap();
+ let date1 = Utc
+ .with_ymd_and_hms(2014, 7, 8, 9, 10, 11)
+ .unwrap()
+ .naive_utc();
+ let date2 = Utc
+ .with_ymd_and_hms(2014, 7, 8, 9, 10, 12)
+ .unwrap()
+ .naive_utc();
assert_ne!(
Uuid::from_name_and_date(user_id, &date1),
Uuid::from_name_and_date(user_id, &date2)
diff --git a/server/src/domain/ldap/user.rs b/server/src/domain/ldap/user.rs
index 08f9853..6903aa4 100644
--- a/server/src/domain/ldap/user.rs
+++ b/server/src/domain/ldap/user.rs
@@ -1,3 +1,4 @@
+use chrono::TimeZone;
use ldap3_proto::{
proto::LdapOp, LdapFilter, LdapPartialAttribute, LdapResultCode, LdapSearchResultEntry,
};
@@ -49,7 +50,10 @@ fn get_user_attribute(
})
.collect(),
"cn" | "displayname" => vec![user.display_name.clone()?.into_bytes()],
- "createtimestamp" | "modifytimestamp" => vec![user.creation_date.to_rfc3339().into_bytes()],
+ "createtimestamp" | "modifytimestamp" => vec![chrono::Utc
+ .from_utc_datetime(&user.creation_date)
+ .to_rfc3339()
+ .into_bytes()],
"1.1" => return None,
// We ignore the operational attribute wildcard.
"+" => return None,
diff --git a/server/src/domain/model/groups.rs b/server/src/domain/model/groups.rs
index 748a61e..d9a74c8 100644
--- a/server/src/domain/model/groups.rs
+++ b/server/src/domain/model/groups.rs
@@ -11,7 +11,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub group_id: GroupId,
pub display_name: String,
- pub creation_date: chrono::DateTime,
+ pub creation_date: chrono::NaiveDateTime,
pub uuid: Uuid,
}
diff --git a/server/src/domain/model/jwt_refresh_storage.rs b/server/src/domain/model/jwt_refresh_storage.rs
index d7753ff..ebca1b1 100644
--- a/server/src/domain/model/jwt_refresh_storage.rs
+++ b/server/src/domain/model/jwt_refresh_storage.rs
@@ -11,7 +11,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub refresh_token_hash: i64,
pub user_id: UserId,
- pub expiry_date: chrono::DateTime,
+ pub expiry_date: chrono::NaiveDateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
diff --git a/server/src/domain/model/jwt_storage.rs b/server/src/domain/model/jwt_storage.rs
index 6fc6a4e..6ca9208 100644
--- a/server/src/domain/model/jwt_storage.rs
+++ b/server/src/domain/model/jwt_storage.rs
@@ -11,7 +11,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub jwt_hash: i64,
pub user_id: UserId,
- pub expiry_date: chrono::DateTime,
+ pub expiry_date: chrono::NaiveDateTime,
pub blacklisted: bool,
}
diff --git a/server/src/domain/model/password_reset_tokens.rs b/server/src/domain/model/password_reset_tokens.rs
index 54b1bea..a252b36 100644
--- a/server/src/domain/model/password_reset_tokens.rs
+++ b/server/src/domain/model/password_reset_tokens.rs
@@ -11,7 +11,7 @@ pub struct Model {
#[sea_orm(primary_key, auto_increment = false)]
pub token: String,
pub user_id: UserId,
- pub expiry_date: chrono::DateTime,
+ pub expiry_date: chrono::NaiveDateTime,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
diff --git a/server/src/domain/model/users.rs b/server/src/domain/model/users.rs
index a9f1b02..32f8d86 100644
--- a/server/src/domain/model/users.rs
+++ b/server/src/domain/model/users.rs
@@ -18,7 +18,7 @@ pub struct Model {
pub first_name: Option,
pub last_name: Option,
pub avatar: Option,
- pub creation_date: chrono::DateTime,
+ pub creation_date: chrono::NaiveDateTime,
pub password_hash: Option>,
pub totp_secret: Option,
pub mfa_type: Option,
diff --git a/server/src/domain/sql_group_backend_handler.rs b/server/src/domain/sql_group_backend_handler.rs
index aaca7fe..5367090 100644
--- a/server/src/domain/sql_group_backend_handler.rs
+++ b/server/src/domain/sql_group_backend_handler.rs
@@ -116,7 +116,7 @@ impl GroupBackendHandler for SqlBackendHandler {
#[instrument(skip_all, level = "debug", ret, err)]
async fn create_group(&self, group_name: &str) -> Result {
debug!(?group_name);
- let now = chrono::Utc::now();
+ let now = chrono::Utc::now().naive_utc();
let uuid = Uuid::from_name_and_date(group_name, &now);
let new_group = model::groups::ActiveModel {
display_name: ActiveValue::Set(group_name.to_owned()),
diff --git a/server/src/domain/sql_migrations.rs b/server/src/domain/sql_migrations.rs
index ff5bb21..7efb152 100644
--- a/server/src/domain/sql_migrations.rs
+++ b/server/src/domain/sql_migrations.rs
@@ -170,7 +170,7 @@ pub async fn upgrade_to_v1(pool: &DbConnection) -> std::result::Result<(), sea_o
struct ShortGroupDetails {
group_id: GroupId,
display_name: String,
- creation_date: chrono::DateTime,
+ creation_date: chrono::NaiveDateTime,
}
for result in ShortGroupDetails::find_by_statement(
builder.build(
@@ -220,7 +220,7 @@ pub async fn upgrade_to_v1(pool: &DbConnection) -> std::result::Result<(), sea_o
#[derive(FromQueryResult)]
struct ShortUserDetails {
user_id: UserId,
- creation_date: chrono::DateTime,
+ creation_date: chrono::NaiveDateTime,
}
for result in ShortUserDetails::find_by_statement(
builder.build(
diff --git a/server/src/domain/sql_tables.rs b/server/src/domain/sql_tables.rs
index 1bc4f77..0f202b0 100644
--- a/server/src/domain/sql_tables.rs
+++ b/server/src/domain/sql_tables.rs
@@ -67,7 +67,7 @@ mod tests {
#[derive(FromQueryResult, PartialEq, Eq, Debug)]
struct ShortUserDetails {
display_name: String,
- creation_date: chrono::DateTime,
+ creation_date: chrono::NaiveDateTime,
}
let result = ShortUserDetails::find_by_statement(raw_statement(
r#"SELECT display_name, creation_date FROM users WHERE user_id = "bôb""#,
@@ -80,7 +80,7 @@ mod tests {
result,
ShortUserDetails {
display_name: "Bob Bobbersön".to_owned(),
- creation_date: Utc.timestamp_opt(0, 0).unwrap()
+ creation_date: Utc.timestamp_opt(0, 0).unwrap().naive_utc(),
}
);
}
diff --git a/server/src/domain/sql_user_backend_handler.rs b/server/src/domain/sql_user_backend_handler.rs
index dc7b99e..9220dff 100644
--- a/server/src/domain/sql_user_backend_handler.rs
+++ b/server/src/domain/sql_user_backend_handler.rs
@@ -158,7 +158,7 @@ impl UserBackendHandler for SqlBackendHandler {
#[instrument(skip_all, level = "debug", err)]
async fn create_user(&self, request: CreateUserRequest) -> Result<()> {
debug!(user_id = ?request.user_id);
- let now = chrono::Utc::now();
+ let now = chrono::Utc::now().naive_utc();
let uuid = Uuid::from_name_and_date(request.user_id.as_str(), &now);
let new_user = model::users::ActiveModel {
user_id: Set(request.user_id),
diff --git a/server/src/domain/types.rs b/server/src/domain/types.rs
index 76673d8..494f8f9 100644
--- a/server/src/domain/types.rs
+++ b/server/src/domain/types.rs
@@ -1,3 +1,4 @@
+use chrono::{NaiveDateTime, TimeZone};
use sea_orm::{
entity::IntoActiveValue,
sea_query::{value::ValueType, ArrayType, ColumnType, Nullable, ValueTypeErr},
@@ -7,18 +8,23 @@ use serde::{Deserialize, Serialize};
pub use super::model::{GroupColumn, UserColumn};
-pub type DateTime = chrono::DateTime;
-
#[derive(PartialEq, Hash, Eq, Clone, Debug, Default, Serialize, Deserialize)]
#[serde(try_from = "&str")]
pub struct Uuid(String);
impl Uuid {
- pub fn from_name_and_date(name: &str, creation_date: &DateTime) -> Self {
+ pub fn from_name_and_date(name: &str, creation_date: &NaiveDateTime) -> Self {
Uuid(
uuid::Uuid::new_v3(
&uuid::Uuid::NAMESPACE_X500,
- &[name.as_bytes(), creation_date.to_rfc3339().as_bytes()].concat(),
+ &[
+ name.as_bytes(),
+ chrono::Utc
+ .from_utc_datetime(creation_date)
+ .to_rfc3339()
+ .as_bytes(),
+ ]
+ .concat(),
)
.to_string(),
)
@@ -308,15 +314,14 @@ pub struct User {
pub first_name: Option,
pub last_name: Option,
pub avatar: Option,
- pub creation_date: DateTime,
+ pub creation_date: NaiveDateTime,
pub uuid: Uuid,
}
#[cfg(test)]
impl Default for User {
fn default() -> Self {
- use chrono::TimeZone;
- let epoch = chrono::Utc.timestamp_opt(0, 0).unwrap();
+ let epoch = chrono::Utc.timestamp_opt(0, 0).unwrap().naive_utc();
User {
user_id: UserId::default(),
email: String::new(),
@@ -373,7 +378,7 @@ impl TryFromU64 for GroupId {
pub struct Group {
pub id: GroupId,
pub display_name: String,
- pub creation_date: DateTime,
+ pub creation_date: NaiveDateTime,
pub uuid: Uuid,
pub users: Vec,
}
@@ -382,7 +387,7 @@ pub struct Group {
pub struct GroupDetails {
pub group_id: GroupId,
pub display_name: String,
- pub creation_date: DateTime,
+ pub creation_date: NaiveDateTime,
pub uuid: Uuid,
}
diff --git a/server/src/infra/graphql/query.rs b/server/src/infra/graphql/query.rs
index 03091f6..7c97050 100644
--- a/server/src/infra/graphql/query.rs
+++ b/server/src/infra/graphql/query.rs
@@ -3,6 +3,7 @@ use crate::domain::{
ldap::utils::map_user_field,
types::{GroupDetails, GroupId, UserColumn, UserId},
};
+use chrono::TimeZone;
use juniper::{graphql_object, FieldResult, GraphQLInputObject};
use serde::{Deserialize, Serialize};
use tracing::{debug, debug_span, Instrument};
@@ -230,7 +231,7 @@ impl User {
}
fn creation_date(&self) -> chrono::DateTime {
- self.user.creation_date
+ chrono::Utc.from_utc_datetime(&self.user.creation_date)
}
fn uuid(&self) -> &str {
@@ -275,7 +276,7 @@ impl From for User {
pub struct Group {
group_id: i32,
display_name: String,
- creation_date: chrono::DateTime,
+ creation_date: chrono::NaiveDateTime,
uuid: String,
members: Option>,
_phantom: std::marker::PhantomData>,
@@ -290,7 +291,7 @@ impl Group {
self.display_name.clone()
}
fn creation_date(&self) -> chrono::DateTime {
- self.creation_date
+ chrono::Utc.from_utc_datetime(&self.creation_date)
}
fn uuid(&self) -> String {
self.uuid.clone()
@@ -389,7 +390,7 @@ mod tests {
Ok(DomainUser {
user_id: UserId::new("bob"),
email: "bob@bobbers.on".to_string(),
- creation_date: chrono::Utc.timestamp_millis_opt(42).unwrap(),
+ creation_date: chrono::Utc.timestamp_millis_opt(42).unwrap().naive_utc(),
uuid: crate::uuid!("b1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
..Default::default()
})
@@ -398,7 +399,7 @@ mod tests {
groups.insert(GroupDetails {
group_id: GroupId(3),
display_name: "Bobbersons".to_string(),
- creation_date: chrono::Utc.timestamp_nanos(42),
+ creation_date: chrono::Utc.timestamp_nanos(42).naive_utc(),
uuid: crate::uuid!("a1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
});
mock.expect_get_user_groups()
diff --git a/server/src/infra/ldap_handler.rs b/server/src/infra/ldap_handler.rs
index 790d2e2..8ae3e1c 100644
--- a/server/src/infra/ldap_handler.rs
+++ b/server/src/infra/ldap_handler.rs
@@ -667,7 +667,7 @@ mod tests {
set.insert(GroupDetails {
group_id: GroupId(42),
display_name: group,
- creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap(),
+ creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
uuid: uuid!("a1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
});
Ok(set)
@@ -754,7 +754,7 @@ mod tests {
set.insert(GroupDetails {
group_id: GroupId(42),
display_name: "lldap_admin".to_string(),
- creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap(),
+ creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
uuid: uuid!("a1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
});
Ok(set)
@@ -841,7 +841,7 @@ mod tests {
groups: Some(vec![GroupDetails {
group_id: GroupId(42),
display_name: "rockstars".to_string(),
- creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap(),
+ creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
uuid: uuid!("a1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
}]),
}])
@@ -1006,7 +1006,10 @@ mod tests {
last_name: Some("Cricket".to_string()),
avatar: Some(JpegPhoto::for_tests()),
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
- creation_date: Utc.with_ymd_and_hms(2014, 7, 8, 9, 10, 11).unwrap(),
+ creation_date: Utc
+ .with_ymd_and_hms(2014, 7, 8, 9, 10, 11)
+ .unwrap()
+ .naive_utc(),
},
groups: None,
},
@@ -1135,14 +1138,14 @@ mod tests {
Group {
id: GroupId(1),
display_name: "group_1".to_string(),
- creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap(),
+ creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
users: vec![UserId::new("bob"), UserId::new("john")],
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
},
Group {
id: GroupId(3),
display_name: "BestGroup".to_string(),
- creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap(),
+ creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
users: vec![UserId::new("john")],
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
},
@@ -1228,7 +1231,7 @@ mod tests {
Ok(vec![Group {
display_name: "group_1".to_string(),
id: GroupId(1),
- creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap(),
+ creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
users: vec![],
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
}])
@@ -1279,7 +1282,7 @@ mod tests {
Ok(vec![Group {
display_name: "group_1".to_string(),
id: GroupId(1),
- creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap(),
+ creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
users: vec![],
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
}])
@@ -1555,7 +1558,7 @@ mod tests {
Ok(vec![Group {
id: GroupId(1),
display_name: "group_1".to_string(),
- creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap(),
+ creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
users: vec![UserId::new("bob"), UserId::new("john")],
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
}])
@@ -1629,7 +1632,7 @@ mod tests {
Ok(vec![Group {
id: GroupId(1),
display_name: "group_1".to_string(),
- creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap(),
+ creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
users: vec![UserId::new("bob"), UserId::new("john")],
uuid: uuid!("04ac75e0-2900-3e21-926c-2f732c26b3fc"),
}])
@@ -1962,7 +1965,7 @@ mod tests {
groups.insert(GroupDetails {
group_id: GroupId(0),
display_name: "lldap_admin".to_string(),
- creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap(),
+ creation_date: chrono::Utc.timestamp_opt(42, 42).unwrap().naive_utc(),
uuid: uuid!("a1a2a3a4b1b2c1c2d1d2d3d4d5d6d7d8"),
});
mock.expect_get_user_groups()
diff --git a/server/src/infra/sql_backend_handler.rs b/server/src/infra/sql_backend_handler.rs
index 91741cc..253eca8 100644
--- a/server/src/infra/sql_backend_handler.rs
+++ b/server/src/infra/sql_backend_handler.rs
@@ -61,7 +61,7 @@ impl TcpBackendHandler for SqlBackendHandler {
let new_token = model::jwt_refresh_storage::Model {
refresh_token_hash: refresh_token_hash as i64,
user_id: user.clone(),
- expiry_date: chrono::Utc::now() + duration,
+ expiry_date: chrono::Utc::now().naive_utc() + duration,
}
.into_active_model();
new_token.insert(&self.sql_pool).await?;
@@ -131,7 +131,7 @@ impl TcpBackendHandler for SqlBackendHandler {
let new_token = model::password_reset_tokens::Model {
token: token.clone(),
user_id: user.clone(),
- expiry_date: chrono::Utc::now() + duration,
+ expiry_date: chrono::Utc::now().naive_utc() + duration,
}
.into_active_model();
new_token.insert(&self.sql_pool).await?;
From 955a559c21b8a0b15b382a9044984ac1a19daccf Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Fri, 13 Jan 2023 15:28:58 +0100
Subject: [PATCH 08/62] clippy: fix warning
---
server/src/domain/sql_opaque_handler.rs | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/server/src/domain/sql_opaque_handler.rs b/server/src/domain/sql_opaque_handler.rs
index 1ba6e8d..b2ded01 100644
--- a/server/src/domain/sql_opaque_handler.rs
+++ b/server/src/domain/sql_opaque_handler.rs
@@ -133,7 +133,7 @@ impl OpaqueHandler for SqlOpaqueHandler {
let encrypted_state = orion::aead::seal(&secret_key, &bincode::serialize(&server_data)?)?;
Ok(login::ServerLoginStartResponse {
- server_data: base64::encode(&encrypted_state),
+ server_data: base64::encode(encrypted_state),
credential_response: start_response.message,
})
}
From f979e16b9543ee0312bb0c4ec57700be207e4001 Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Mon, 16 Jan 2023 16:56:55 +0100
Subject: [PATCH 09/62] server: Fix healthcheck return code
The healthcheck was not returning a non-zero code when failing, due to
an extra layer of Results
---
server/src/infra/healthcheck.rs | 1 +
server/src/main.rs | 20 +++++++++++++-------
2 files changed, 14 insertions(+), 7 deletions(-)
diff --git a/server/src/infra/healthcheck.rs b/server/src/infra/healthcheck.rs
index 0fdd997..40089a0 100644
--- a/server/src/infra/healthcheck.rs
+++ b/server/src/infra/healthcheck.rs
@@ -99,6 +99,7 @@ fn get_tls_connector() -> Result {
#[instrument(skip_all, level = "info", err)]
pub async fn check_ldaps(ldaps_options: &LdapsOptions) -> Result<()> {
if !ldaps_options.enabled {
+ info!("LDAPS not enabled");
return Ok(());
};
let tls_connector = get_tls_connector()?;
diff --git a/server/src/main.rs b/server/src/main.rs
index 005e0ce..2c67bd9 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -158,6 +158,8 @@ fn run_healthcheck(opts: RunOpts) -> Result<()> {
.enable_all()
.build()?;
+ info!("Starting healthchecks");
+
use tokio::time::timeout;
let delay = Duration::from_millis(3000);
let (ldap, ldaps, api) = runtime.block_on(async {
@@ -168,14 +170,18 @@ fn run_healthcheck(opts: RunOpts) -> Result<()> {
)
});
- let mut failure = false;
- [ldap, ldaps, api]
+ let failure = [ldap, ldaps, api]
.into_iter()
- .filter_map(Result::err)
- .for_each(|e| {
- failure = true;
- error!("{:#}", e)
- });
+ .flat_map(|res| {
+ if let Err(e) = &res {
+ error!("Error running the health check: {:#}", e);
+ }
+ res
+ })
+ .any(|r| r.is_err());
+ if failure {
+ error!("Healthcheck failed");
+ }
std::process::exit(i32::from(failure))
}
From 807fd10d13a700c5d7507e2113748b95d6c7bf9e Mon Sep 17 00:00:00 2001
From: Luca Tagliavini
Date: Tue, 17 Jan 2023 14:21:57 +0100
Subject: [PATCH 10/62] server: Add support for DN filters
---
server/src/domain/ldap/group.rs | 16 +++++++++++++++-
server/src/domain/ldap/user.rs | 18 +++++++++++++++++-
server/src/infra/ldap_handler.rs | 10 ++++++++++
3 files changed, 42 insertions(+), 2 deletions(-)
diff --git a/server/src/domain/ldap/group.rs b/server/src/domain/ldap/group.rs
index 2ab5b64..2a4e37f 100644
--- a/server/src/domain/ldap/group.rs
+++ b/server/src/domain/ldap/group.rs
@@ -12,7 +12,8 @@ use crate::domain::{
use super::{
error::LdapResult,
utils::{
- expand_attribute_wildcards, get_user_id_from_distinguished_name, map_group_field, LdapInfo,
+ expand_attribute_wildcards, get_group_id_from_distinguished_name,
+ get_user_id_from_distinguished_name, map_group_field, LdapInfo,
},
};
@@ -126,6 +127,19 @@ fn convert_group_filter(
vec![],
)))),
},
+ "dn" => Ok(
+ match get_group_id_from_distinguished_name(
+ value.to_ascii_lowercase().as_str(),
+ &ldap_info.base_dn,
+ &ldap_info.base_dn_str,
+ ) {
+ Ok(value) => GroupRequestFilter::DisplayName(value),
+ Err(_) => {
+ warn!("Invalid dn filter on group: {}", value);
+ GroupRequestFilter::Not(Box::new(GroupRequestFilter::And(vec![])))
+ }
+ },
+ ),
_ => match map_group_field(field) {
Some(GroupColumn::DisplayName) => {
Ok(GroupRequestFilter::DisplayName(value.to_string()))
diff --git a/server/src/domain/ldap/user.rs b/server/src/domain/ldap/user.rs
index 6903aa4..20b06c9 100644
--- a/server/src/domain/ldap/user.rs
+++ b/server/src/domain/ldap/user.rs
@@ -6,7 +6,10 @@ use tracing::{debug, info, instrument, warn};
use crate::domain::{
handler::{BackendHandler, UserRequestFilter},
- ldap::{error::LdapError, utils::expand_attribute_wildcards},
+ ldap::{
+ error::LdapError,
+ utils::{expand_attribute_wildcards, get_user_id_from_distinguished_name},
+ },
types::{GroupDetails, User, UserColumn, UserId},
};
@@ -147,6 +150,19 @@ fn convert_user_filter(ldap_info: &LdapInfo, filter: &LdapFilter) -> LdapResult<
vec![],
)))),
},
+ "dn" => Ok(
+ match get_user_id_from_distinguished_name(
+ value.to_ascii_lowercase().as_str(),
+ &ldap_info.base_dn,
+ &ldap_info.base_dn_str,
+ ) {
+ Ok(value) => UserRequestFilter::UserId(value),
+ Err(_) => {
+ warn!("Invalid dn filter on user: {}", value);
+ UserRequestFilter::Not(Box::new(UserRequestFilter::And(vec![])))
+ }
+ },
+ ),
_ => match map_user_field(field) {
Some(UserColumn::UserId) => Ok(UserRequestFilter::UserId(UserId::new(value))),
Some(field) => Ok(UserRequestFilter::Equality(field, value.clone())),
diff --git a/server/src/infra/ldap_handler.rs b/server/src/infra/ldap_handler.rs
index 8ae3e1c..6287573 100644
--- a/server/src/infra/ldap_handler.rs
+++ b/server/src/infra/ldap_handler.rs
@@ -1217,6 +1217,7 @@ mod tests {
.with(eq(Some(GroupRequestFilter::And(vec![
GroupRequestFilter::DisplayName("group_1".to_string()),
GroupRequestFilter::Member(UserId::new("bob")),
+ GroupRequestFilter::DisplayName("rockstars".to_string()),
GroupRequestFilter::And(vec![]),
GroupRequestFilter::And(vec![]),
GroupRequestFilter::And(vec![]),
@@ -1245,6 +1246,10 @@ mod tests {
"uniqueMember".to_string(),
"uid=bob,ou=peopLe,Dc=eXample,dc=com".to_string(),
),
+ LdapFilter::Equality(
+ "dn".to_string(),
+ "uid=rockstars,ou=groups,dc=example,dc=com".to_string(),
+ ),
LdapFilter::Equality("obJEctclass".to_string(), "groupofUniqueNames".to_string()),
LdapFilter::Equality("objectclass".to_string(), "groupOfNames".to_string()),
LdapFilter::Present("objectclass".to_string()),
@@ -1403,6 +1408,7 @@ mod tests {
UserRequestFilter::Not(Box::new(UserRequestFilter::UserId(UserId::new(
"bob",
)))),
+ UserRequestFilter::UserId("bob_1".to_string().into()),
UserRequestFilter::And(vec![]),
UserRequestFilter::Not(Box::new(UserRequestFilter::And(vec![]))),
UserRequestFilter::And(vec![]),
@@ -1422,6 +1428,10 @@ mod tests {
"uid".to_string(),
"bob".to_string(),
))),
+ LdapFilter::Equality(
+ "dn".to_string(),
+ "uid=bob_1,ou=people,dc=example,dc=com".to_string(),
+ ),
LdapFilter::Equality("objectclass".to_string(), "persOn".to_string()),
LdapFilter::Equality("objectclass".to_string(), "other".to_string()),
LdapFilter::Present("objectClass".to_string()),
From 9018e6fa348ff77a96378ec6394797a4e0cc3e1a Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Tue, 17 Jan 2023 14:43:37 +0100
Subject: [PATCH 11/62] server, refactor: Add a conversion from bool for the
filters
---
server/src/domain/handler.rs | 20 ++++++++++
server/src/domain/ldap/group.rs | 54 +++++++++++---------------
server/src/domain/ldap/user.rs | 65 ++++++++++++--------------------
server/src/infra/ldap_handler.rs | 44 +++++++++++----------
4 files changed, 87 insertions(+), 96 deletions(-)
diff --git a/server/src/domain/handler.rs b/server/src/domain/handler.rs
index a39c256..d93657d 100644
--- a/server/src/domain/handler.rs
+++ b/server/src/domain/handler.rs
@@ -27,6 +27,16 @@ pub enum UserRequestFilter {
MemberOfId(GroupId),
}
+impl From for UserRequestFilter {
+ fn from(val: bool) -> Self {
+ if val {
+ Self::And(vec![])
+ } else {
+ Self::Not(Box::new(Self::And(vec![])))
+ }
+ }
+}
+
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone)]
pub enum GroupRequestFilter {
And(Vec),
@@ -39,6 +49,16 @@ pub enum GroupRequestFilter {
Member(UserId),
}
+impl From for GroupRequestFilter {
+ fn from(val: bool) -> Self {
+ if val {
+ Self::And(vec![])
+ } else {
+ Self::Not(Box::new(Self::And(vec![])))
+ }
+ }
+}
+
#[derive(PartialEq, Eq, Debug, Serialize, Deserialize, Clone, Default)]
pub struct CreateUserRequest {
// Same fields as User, but no creation_date, and with password.
diff --git a/server/src/domain/ldap/group.rs b/server/src/domain/ldap/group.rs
index 2a4e37f..4555e67 100644
--- a/server/src/domain/ldap/group.rs
+++ b/server/src/domain/ldap/group.rs
@@ -121,25 +121,20 @@ fn convert_group_filter(
)?;
Ok(GroupRequestFilter::Member(user_name))
}
- "objectclass" => match value.as_str() {
- "groupofuniquenames" | "groupofnames" => Ok(GroupRequestFilter::And(vec![])),
- _ => Ok(GroupRequestFilter::Not(Box::new(GroupRequestFilter::And(
- vec![],
- )))),
- },
- "dn" => Ok(
- match get_group_id_from_distinguished_name(
- value.to_ascii_lowercase().as_str(),
- &ldap_info.base_dn,
- &ldap_info.base_dn_str,
- ) {
- Ok(value) => GroupRequestFilter::DisplayName(value),
- Err(_) => {
- warn!("Invalid dn filter on group: {}", value);
- GroupRequestFilter::Not(Box::new(GroupRequestFilter::And(vec![])))
- }
- },
- ),
+ "objectclass" => Ok(GroupRequestFilter::from(matches!(
+ value.as_str(),
+ "groupofuniquenames" | "groupofnames"
+ ))),
+ "dn" => Ok(get_group_id_from_distinguished_name(
+ value.to_ascii_lowercase().as_str(),
+ &ldap_info.base_dn,
+ &ldap_info.base_dn_str,
+ )
+ .map(GroupRequestFilter::DisplayName)
+ .unwrap_or_else(|_| {
+ warn!("Invalid dn filter on group: {}", value);
+ GroupRequestFilter::from(false)
+ })),
_ => match map_group_field(field) {
Some(GroupColumn::DisplayName) => {
Ok(GroupRequestFilter::DisplayName(value.to_string()))
@@ -158,9 +153,7 @@ fn convert_group_filter(
field
);
}
- Ok(GroupRequestFilter::Not(Box::new(GroupRequestFilter::And(
- vec![],
- ))))
+ Ok(GroupRequestFilter::from(false))
}
},
}
@@ -174,17 +167,12 @@ fn convert_group_filter(
LdapFilter::Not(filter) => Ok(GroupRequestFilter::Not(Box::new(rec(filter)?))),
LdapFilter::Present(field) => {
let field = &field.to_ascii_lowercase();
- if field == "objectclass"
- || field == "dn"
- || field == "distinguishedname"
- || map_group_field(field).is_some()
- {
- Ok(GroupRequestFilter::And(vec![]))
- } else {
- Ok(GroupRequestFilter::Not(Box::new(GroupRequestFilter::And(
- vec![],
- ))))
- }
+ Ok(GroupRequestFilter::from(
+ field == "objectclass"
+ || field == "dn"
+ || field == "distinguishedname"
+ || map_group_field(field).is_some(),
+ ))
}
_ => Err(LdapError {
code: LdapResultCode::UnwillingToPerform,
diff --git a/server/src/domain/ldap/user.rs b/server/src/domain/ldap/user.rs
index 20b06c9..caddb6d 100644
--- a/server/src/domain/ldap/user.rs
+++ b/server/src/domain/ldap/user.rs
@@ -134,35 +134,27 @@ fn convert_user_filter(ldap_info: &LdapInfo, filter: &LdapFilter) -> LdapResult<
LdapFilter::Equality(field, value) => {
let field = &field.to_ascii_lowercase();
match field.as_str() {
- "memberof" => {
- let group_name = get_group_id_from_distinguished_name(
+ "memberof" => Ok(UserRequestFilter::MemberOf(
+ get_group_id_from_distinguished_name(
&value.to_ascii_lowercase(),
&ldap_info.base_dn,
&ldap_info.base_dn_str,
- )?;
- Ok(UserRequestFilter::MemberOf(group_name))
- }
- "objectclass" => match value.to_ascii_lowercase().as_str() {
- "person" | "inetorgperson" | "posixaccount" | "mailaccount" => {
- Ok(UserRequestFilter::And(vec![]))
- }
- _ => Ok(UserRequestFilter::Not(Box::new(UserRequestFilter::And(
- vec![],
- )))),
- },
- "dn" => Ok(
- match get_user_id_from_distinguished_name(
- value.to_ascii_lowercase().as_str(),
- &ldap_info.base_dn,
- &ldap_info.base_dn_str,
- ) {
- Ok(value) => UserRequestFilter::UserId(value),
- Err(_) => {
- warn!("Invalid dn filter on user: {}", value);
- UserRequestFilter::Not(Box::new(UserRequestFilter::And(vec![])))
- }
- },
- ),
+ )?,
+ )),
+ "objectclass" => Ok(UserRequestFilter::from(matches!(
+ value.to_ascii_lowercase().as_str(),
+ "person" | "inetorgperson" | "posixaccount" | "mailaccount"
+ ))),
+ "dn" => Ok(get_user_id_from_distinguished_name(
+ value.to_ascii_lowercase().as_str(),
+ &ldap_info.base_dn,
+ &ldap_info.base_dn_str,
+ )
+ .map(UserRequestFilter::UserId)
+ .unwrap_or_else(|_| {
+ warn!("Invalid dn filter on user: {}", value);
+ UserRequestFilter::from(false)
+ })),
_ => match map_user_field(field) {
Some(UserColumn::UserId) => Ok(UserRequestFilter::UserId(UserId::new(value))),
Some(field) => Ok(UserRequestFilter::Equality(field, value.clone())),
@@ -174,9 +166,7 @@ fn convert_user_filter(ldap_info: &LdapInfo, filter: &LdapFilter) -> LdapResult<
field
);
}
- Ok(UserRequestFilter::Not(Box::new(UserRequestFilter::And(
- vec![],
- ))))
+ Ok(UserRequestFilter::from(false))
}
},
}
@@ -184,17 +174,12 @@ fn convert_user_filter(ldap_info: &LdapInfo, filter: &LdapFilter) -> LdapResult<
LdapFilter::Present(field) => {
let field = &field.to_ascii_lowercase();
// Check that it's a field we support.
- if field == "objectclass"
- || field == "dn"
- || field == "distinguishedname"
- || map_user_field(field).is_some()
- {
- Ok(UserRequestFilter::And(vec![]))
- } else {
- Ok(UserRequestFilter::Not(Box::new(UserRequestFilter::And(
- vec![],
- ))))
- }
+ Ok(UserRequestFilter::from(
+ field == "objectclass"
+ || field == "dn"
+ || field == "distinguishedname"
+ || map_user_field(field).is_some(),
+ ))
}
_ => Err(LdapError {
code: LdapResultCode::UnwillingToPerform,
diff --git a/server/src/infra/ldap_handler.rs b/server/src/infra/ldap_handler.rs
index 6287573..2c5e6ce 100644
--- a/server/src/infra/ldap_handler.rs
+++ b/server/src/infra/ldap_handler.rs
@@ -778,7 +778,7 @@ mod tests {
mock.expect_list_users()
.with(
eq(Some(UserRequestFilter::And(vec![
- UserRequestFilter::And(vec![]),
+ UserRequestFilter::from(true),
UserRequestFilter::UserId(UserId::new("test")),
]))),
eq(false),
@@ -813,7 +813,7 @@ mod tests {
async fn test_search_readonly_user() {
let mut mock = MockTestBackendHandler::new();
mock.expect_list_users()
- .with(eq(Some(UserRequestFilter::And(vec![]))), eq(false))
+ .with(eq(Some(UserRequestFilter::from(true))), eq(false))
.times(1)
.return_once(|_, _| Ok(vec![]));
let mut ldap_handler = setup_bound_readonly_handler(mock).await;
@@ -830,7 +830,7 @@ mod tests {
async fn test_search_member_of() {
let mut mock = MockTestBackendHandler::new();
mock.expect_list_users()
- .with(eq(Some(UserRequestFilter::And(vec![]))), eq(true))
+ .with(eq(Some(UserRequestFilter::from(true))), eq(true))
.times(1)
.return_once(|_, _| {
Ok(vec![UserAndGroups {
@@ -873,7 +873,7 @@ mod tests {
mock.expect_list_users()
.with(
eq(Some(UserRequestFilter::And(vec![
- UserRequestFilter::And(vec![]),
+ UserRequestFilter::from(true),
UserRequestFilter::UserId(UserId::new("bob")),
]))),
eq(false),
@@ -1131,7 +1131,7 @@ mod tests {
async fn test_search_groups() {
let mut mock = MockTestBackendHandler::new();
mock.expect_list_groups()
- .with(eq(Some(GroupRequestFilter::And(vec![]))))
+ .with(eq(Some(GroupRequestFilter::from(true))))
.times(1)
.return_once(|_| {
Ok(vec![
@@ -1218,14 +1218,12 @@ mod tests {
GroupRequestFilter::DisplayName("group_1".to_string()),
GroupRequestFilter::Member(UserId::new("bob")),
GroupRequestFilter::DisplayName("rockstars".to_string()),
- GroupRequestFilter::And(vec![]),
- GroupRequestFilter::And(vec![]),
- GroupRequestFilter::And(vec![]),
- GroupRequestFilter::And(vec![]),
- GroupRequestFilter::Not(Box::new(GroupRequestFilter::Not(Box::new(
- GroupRequestFilter::And(vec![]),
- )))),
- GroupRequestFilter::Not(Box::new(GroupRequestFilter::And(vec![]))),
+ GroupRequestFilter::from(true),
+ GroupRequestFilter::from(true),
+ GroupRequestFilter::from(true),
+ GroupRequestFilter::from(true),
+ GroupRequestFilter::Not(Box::new(GroupRequestFilter::from(false))),
+ GroupRequestFilter::from(false),
]))))
.times(1)
.return_once(|_| {
@@ -1321,7 +1319,7 @@ mod tests {
let mut mock = MockTestBackendHandler::new();
mock.expect_list_groups()
.with(eq(Some(GroupRequestFilter::And(vec![
- GroupRequestFilter::And(vec![]),
+ GroupRequestFilter::from(true),
GroupRequestFilter::DisplayName("rockstars".to_string()),
]))))
.times(1)
@@ -1409,12 +1407,12 @@ mod tests {
"bob",
)))),
UserRequestFilter::UserId("bob_1".to_string().into()),
- UserRequestFilter::And(vec![]),
- UserRequestFilter::Not(Box::new(UserRequestFilter::And(vec![]))),
- UserRequestFilter::And(vec![]),
- UserRequestFilter::And(vec![]),
- UserRequestFilter::Not(Box::new(UserRequestFilter::And(vec![]))),
- UserRequestFilter::Not(Box::new(UserRequestFilter::And(vec![]))),
+ UserRequestFilter::from(true),
+ UserRequestFilter::from(false),
+ UserRequestFilter::from(true),
+ UserRequestFilter::from(true),
+ UserRequestFilter::from(false),
+ UserRequestFilter::from(false),
],
)]))),
eq(false),
@@ -1562,7 +1560,7 @@ mod tests {
}])
});
mock.expect_list_groups()
- .with(eq(Some(GroupRequestFilter::And(vec![]))))
+ .with(eq(Some(GroupRequestFilter::from(true))))
.times(1)
.return_once(|_| {
Ok(vec![Group {
@@ -1637,7 +1635,7 @@ mod tests {
}])
});
mock.expect_list_groups()
- .with(eq(Some(GroupRequestFilter::And(vec![]))))
+ .with(eq(Some(GroupRequestFilter::from(true))))
.returning(|_| {
Ok(vec![Group {
id: GroupId(1),
@@ -2093,7 +2091,7 @@ mod tests {
async fn test_search_filter_non_attribute() {
let mut mock = MockTestBackendHandler::new();
mock.expect_list_users()
- .with(eq(Some(UserRequestFilter::And(vec![]))), eq(false))
+ .with(eq(Some(UserRequestFilter::from(true))), eq(false))
.times(1)
.return_once(|_, _| Ok(vec![]));
let mut ldap_handler = setup_bound_admin_handler(mock).await;
From d722be889689631320191c0e14506fcb72788a3d Mon Sep 17 00:00:00 2001
From: Igor Rzegocki
Date: Thu, 19 Jan 2023 11:30:25 +0100
Subject: [PATCH 12/62] server: add option to use insecure SMTP connection
---
lldap_config.docker_template.toml | 2 +-
server/src/infra/cli.rs | 1 +
server/src/infra/configuration.rs | 3 +++
server/src/infra/mail.rs | 19 ++++++++++++++-----
4 files changed, 19 insertions(+), 6 deletions(-)
diff --git a/lldap_config.docker_template.toml b/lldap_config.docker_template.toml
index c16fd3f..02e2aac 100644
--- a/lldap_config.docker_template.toml
+++ b/lldap_config.docker_template.toml
@@ -113,7 +113,7 @@ key_file = "/data/private_key"
#server="smtp.gmail.com"
## The SMTP port.
#port=587
-## How the connection is encrypted, either "TLS" or "STARTTLS".
+## How the connection is encrypted, either "NONE" (no encryption), "TLS" or "STARTTLS".
#smtp_encryption = "TLS"
## The SMTP user, usually your email address.
#user="sender@gmail.com"
diff --git a/server/src/infra/cli.rs b/server/src/infra/cli.rs
index a31a968..ab1ba42 100644
--- a/server/src/infra/cli.rs
+++ b/server/src/infra/cli.rs
@@ -117,6 +117,7 @@ pub struct LdapsOpts {
clap::arg_enum! {
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum SmtpEncryption {
+ NONE,
TLS,
STARTTLS,
}
diff --git a/server/src/infra/configuration.rs b/server/src/infra/configuration.rs
index 209adbe..11e517c 100644
--- a/server/src/infra/configuration.rs
+++ b/server/src/infra/configuration.rs
@@ -266,6 +266,9 @@ impl ConfigOverrider for SmtpOpts {
if let Some(password) = &self.smtp_password {
config.smtp_options.password = SecUtf8::from(password.clone());
}
+ if let Some(smtp_encryption) = &self.smtp_encryption {
+ config.smtp_options.smtp_encryption = smtp_encryption.clone();
+ }
if let Some(tls_required) = self.smtp_tls_required {
config.smtp_options.tls_required = Some(tls_required);
}
diff --git a/server/src/infra/mail.rs b/server/src/infra/mail.rs
index bbac6c0..c67614b 100644
--- a/server/src/infra/mail.rs
+++ b/server/src/infra/mail.rs
@@ -26,12 +26,21 @@ async fn send_email(to: Mailbox, subject: &str, body: String, options: &MailOpti
options.user.clone(),
options.password.unsecure().to_string(),
);
- let relay_factory = match options.smtp_encryption {
- SmtpEncryption::TLS => AsyncSmtpTransport::::relay,
- SmtpEncryption::STARTTLS => AsyncSmtpTransport::::starttls_relay,
+ let mailer = match options.smtp_encryption {
+ SmtpEncryption::NONE => {
+ AsyncSmtpTransport::::builder_dangerous(&options.server)
+ }
+ SmtpEncryption::TLS => AsyncSmtpTransport::::relay(&options.server)?,
+ SmtpEncryption::STARTTLS => {
+ AsyncSmtpTransport::::starttls_relay(&options.server)?
+ }
};
- let mailer = relay_factory(&options.server)?.credentials(creds).build();
- mailer.send(email).await?;
+ mailer
+ .credentials(creds)
+ .port(options.port)
+ .build()
+ .send(email)
+ .await?;
Ok(())
}
From 0ae1597ecda9557f471c1dcf6c263fc5b7dea042 Mon Sep 17 00:00:00 2001
From: arcoast <81871508+arcoast@users.noreply.github.com>
Date: Sun, 22 Jan 2023 08:49:00 +0000
Subject: [PATCH 13/62] example_configs: Add Wikijs example
In response to https://github.com/nitnelave/lldap/pull/424#discussion_r1083280235
---
README.md | 1 +
example_configs/wikijs.md | 64 +++++++++++++++++++++++++++++++++++++++
2 files changed, 65 insertions(+)
create mode 100644 example_configs/wikijs.md
diff --git a/README.md b/README.md
index 9173f52..89c1279 100644
--- a/README.md
+++ b/README.md
@@ -250,6 +250,7 @@ folder for help with:
- [Vaultwarden](example_configs/vaultwarden.md)
- [WeKan](example_configs/wekan.md)
- [WG Portal](example_configs/wg_portal.env.example)
+ - [WikiJS](example_configs/wikijs.md)
- [XBackBone](example_configs/xbackbone_config.php)
- [Zendto](example_configs/zendto.md)
diff --git a/example_configs/wikijs.md b/example_configs/wikijs.md
new file mode 100644
index 0000000..07827e5
--- /dev/null
+++ b/example_configs/wikijs.md
@@ -0,0 +1,64 @@
+# Configuration for WikiJS
+Replace `dc=example,dc=com` with your LLDAP configured domain.
+### LDAP URL
+```
+ldap://lldap:3890
+```
+### Admin Bind DN
+```
+uid=admin,ou=people,dc=example,dc=com
+```
+or
+```
+uid=readonlyuser,ou=people,dc=example,dc=com
+```
+### Admin Bind Credentials
+```
+ADMINPASSWORD
+```
+or
+```
+READONLYUSERPASSWORD
+```
+### Search Base
+```
+ou=people,dc=example,dc=com
+```
+### Search Filter
+If you wish the permitted users to be restricted to just the `wiki` group:
+```
+(&(memberof=cn=wiki,ou=groups,dc=example,dc=com)(|(uid={{username}})(mail={{username}}))(objectClass=person))
+```
+If you wish any of the registered LLDAP users to be permitted to use WikiJS:
+```
+(&(|(uid={{username}})(mail={{username}}))(objectClass=person))
+```
+### Use TLS
+Left toggled off
+### Verify TLS Certificate
+Left toggled off
+### TLS Certificate Path
+Left blank
+### Unique ID Field Mapping
+```
+uid
+```
+### Email Field Mapping
+```
+mail
+```
+### Display Name Field Mapping
+```
+givenname
+```
+### Avatar Picture Field Mapping
+```
+jpegPhoto
+```
+### Allow self-registration
+Toggled on
+### Limit to specific email domains
+Left blank
+### Assign to group
+I created a group called `users` and assign my LDAP users to that by default.
+You can use the local admin account to log in and promote an LDAP user to `admin` group if you wish and then deactivate the local login option
From df1169e06d61eea555052732ccb24679f45643a5 Mon Sep 17 00:00:00 2001
From: Dedy Martadinata S
Date: Sun, 22 Jan 2023 17:10:26 +0700
Subject: [PATCH 14/62] docker: simplify binary build, add db integration test
---
.github/workflows/Dockerfile.ci.alpine | 12 +-
.github/workflows/Dockerfile.ci.debian | 12 +-
.github/workflows/Dockerfile.dev | 16 +-
.github/workflows/docker-build-static.yml | 315 ++++++++++------------
4 files changed, 170 insertions(+), 185 deletions(-)
diff --git a/.github/workflows/Dockerfile.ci.alpine b/.github/workflows/Dockerfile.ci.alpine
index 47f778f..5318fd8 100644
--- a/.github/workflows/Dockerfile.ci.alpine
+++ b/.github/workflows/Dockerfile.ci.alpine
@@ -10,8 +10,8 @@ RUN mkdir -p target/
RUN mkdir -p /lldap/app
RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
- mv bin/amd64-lldap-bin/lldap target/lldap && \
- mv bin/amd64-migration-tool-bin/migration-tool target/migration-tool && \
+ mv bin/x86_64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
+ mv bin/x86_64-unknown-linux-musl-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
@@ -19,8 +19,8 @@ RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
; fi
RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
- mv bin/aarch64-lldap-bin/lldap target/lldap && \
- mv bin/aarch64-migration-tool-bin/migration-tool target/migration-tool && \
+ mv bin/aarch64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
+ mv bin/aarch64-unknown-linux-musl-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
@@ -28,8 +28,8 @@ RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
; fi
RUN if [ "${TARGETPLATFORM}" = "linux/arm/v7" ]; then \
- mv bin/armhf-lldap-bin/lldap target/lldap && \
- mv bin/armhf-migration-tool-bin/migration-tool target/migration-tool && \
+ mv bin/armv7-unknown-linux-gnueabihf-lldap-bin/lldap target/lldap && \
+ mv bin/armv7-unknown-linux-gnueabihf-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
diff --git a/.github/workflows/Dockerfile.ci.debian b/.github/workflows/Dockerfile.ci.debian
index 3f7c45b..b27b4e2 100644
--- a/.github/workflows/Dockerfile.ci.debian
+++ b/.github/workflows/Dockerfile.ci.debian
@@ -10,8 +10,8 @@ RUN mkdir -p target/
RUN mkdir -p /lldap/app
RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
- mv bin/amd64-lldap-bin/lldap target/lldap && \
- mv bin/amd64-migration-tool-bin/migration-tool target/migration-tool && \
+ mv bin/x86_64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
+ mv bin/x86_64-unknown-linux-musl-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
@@ -19,8 +19,8 @@ RUN if [ "${TARGETPLATFORM}" = "linux/amd64" ]; then \
; fi
RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
- mv bin/aarch64-lldap-bin/lldap target/lldap && \
- mv bin/aarch64-migration-tool-bin/migration-tool target/migration-tool && \
+ mv bin/aarch64-unknown-linux-musl-lldap-bin/lldap target/lldap && \
+ mv bin/aarch64-unknown-linux-musl-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
@@ -28,8 +28,8 @@ RUN if [ "${TARGETPLATFORM}" = "linux/arm64" ]; then \
; fi
RUN if [ "${TARGETPLATFORM}" = "linux/arm/v7" ]; then \
- mv bin/armhf-lldap-bin/lldap target/lldap && \
- mv bin/armhf-migration-tool-bin/migration-tool target/migration-tool && \
+ mv bin/armv7-unknown-linux-gnueabihf-lldap-bin/lldap target/lldap && \
+ mv bin/armv7-unknown-linux-gnueabihf-migration-tool-bin/migration-tool target/migration-tool && \
chmod +x target/lldap && \
chmod +x target/migration-tool && \
ls -la target/ . && \
diff --git a/.github/workflows/Dockerfile.dev b/.github/workflows/Dockerfile.dev
index bb88144..7bca4e4 100644
--- a/.github/workflows/Dockerfile.dev
+++ b/.github/workflows/Dockerfile.dev
@@ -1,4 +1,5 @@
-FROM rust:1.65-slim-bullseye
+# Keep tracking base image
+FROM rust:1.66-slim-bullseye
# Set needed env path
ENV PATH="/opt/aarch64-linux-musl-cross/:/opt/aarch64-linux-musl-cross/bin/:/opt/x86_64-linux-musl-cross/:/opt/x86_64-linux-musl-cross/bin/:$PATH"
@@ -23,6 +24,14 @@ RUN dpkg --add-architecture arm64 && \
rm -rf /var/lib/apt/lists/* && \
rustup target add aarch64-unknown-linux-gnu
+### armhf deps
+RUN dpkg --add-architecture armhf && \
+ apt update && \
+ apt install -y gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf libc6-armhf-cross libc6-dev-armhf-cross && \
+ apt clean && \
+ rm -rf /var/lib/apt/lists/* && \
+ rustup target add armv7-unknown-linux-gnueabihf
+
### Add musl-gcc aarch64 and x86_64
RUN wget -c https://musl.cc/x86_64-linux-musl-cross.tgz && \
tar zxf ./x86_64-linux-musl-cross.tgz -C /opt && \
@@ -31,4 +40,9 @@ RUN wget -c https://musl.cc/x86_64-linux-musl-cross.tgz && \
rm ./x86_64-linux-musl-cross.tgz && \
rm ./aarch64-linux-musl-cross.tgz
+### Add musl target
+RUN rustup target add x86_64-unknown-linux-musl && \
+ rustup target add aarch64-unknown-linux-musl
+
+
CMD ["bash"]
diff --git a/.github/workflows/docker-build-static.yml b/.github/workflows/docker-build-static.yml
index cc8146c..620e8c5 100644
--- a/.github/workflows/docker-build-static.yml
+++ b/.github/workflows/docker-build-static.yml
@@ -19,14 +19,8 @@ on:
env:
CARGO_TERM_COLOR: always
-# In total 5 jobs, all the jobs are containerized
-# ---
-#######################################################################################
-# GitHub actions randomly timeout when downloading musl-gcc #
-# Using lldap dev image based on https://hub.docker.com/_/rust and musl-gcc bundled #
-# Look into .github/workflows/Dockerfile.dev for development image details #
-#######################################################################################
+### CI Docs
# build-ui , create/compile the web
### install wasm
@@ -34,20 +28,36 @@ env:
### run app/build.sh
### upload artifacts
-# builds-armhf, build-aarch64, build-amd64 create binary for respective arch
-### Add non-native architecture dpkg --add-architecture XXX
-### Install dev tool gcc g++, etc. per respective arch
+# build-bin
+## build-armhf, build-aarch64, build-amd64 , create binary for respective arch
+#######################################################################################
+# GitHub actions randomly timeout when downloading musl-gcc, using custom dev image #
+# Look into .github/workflows/Dockerfile.dev for development image details #
+# Using lldap dev image based on https://hub.docker.com/_/rust and musl-gcc bundled #
+#######################################################################################
### Cargo build
-### Upload artifacts
-
-## the CARGO_ env
-#CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
-# This will determine which architecture lib will be used.
+### aarch64 and amd64 is musl based
+### armv7 is glibc based; musl had an issue with time_t when cross-compiling, see https://github.com/rust-lang/libc/issues/1848
# build-ui,builds-armhf, build-aarch64, build-amd64 will upload artifacts will be used next job
-# build-docker-image job will fetch artifacts and run Dockerfile.ci then push the image.
-# cache based on Cargo.lock
+# lldap-test
+### will run lldap with postgres, mariadb and sqlite backends, then run the healthcheck command.
+
+# Build docker image
+### Triplet docker image arch with debian base
+### amd64 & aarch64 with alpine base
+# build-docker-image job will fetch artifacts and run Dockerfile.ci then push the image.
+### Look into .github/workflows/Dockerfile.ci.debian or .github/workflows/Dockerfile.ci.alpine
+
+# create release artifacts
+### Fetch artifacts
+### Clean up web artifact
+### Setup folder structure
+### Compress
+### Upload
+
+# cache based on Cargo.lock per cargo target
jobs:
build-ui:
@@ -68,124 +78,39 @@ jobs:
key: lldap-ui-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
lldap-ui-
- - name: install rollup nodejs
+ - name: Install rollup (nodejs)
run: npm install -g rollup
- - name: add wasm target
+ - name: Add wasm target (rust)
run: rustup target add wasm32-unknown-unknown
- - name: install wasm-pack with cargo
+ - name: Install wasm-pack with cargo
run: cargo install wasm-pack || true
env:
RUSTFLAGS: ""
- - name: build frontend
+ - name: Build frontend
run: ./app/build.sh
- - name: check path
+ - name: Check build path
run: ls -al app/
- - name: upload ui artifacts
+ - name: Upload ui artifacts
uses: actions/upload-artifact@v3
with:
name: ui
path: app/
- build-armhf:
+
+ build-bin:
runs-on: ubuntu-latest
+ strategy:
+ matrix:
+ target: [armv7-unknown-linux-gnueabihf, aarch64-unknown-linux-musl, x86_64-unknown-linux-musl]
container:
image: nitnelave/rust-dev:latest
env:
CARGO_TARGET_ARMV7_UNKNOWN_LINUX_GNUEABIHF_LINKER: arm-linux-gnueabihf-gcc
- CARGO_TARGET_ARMV7_UNKNOWN_LINUX_MUSLEABIHF_LINKER: arm-linux-gnueabihf-ld
- CARGO_TERM_COLOR: always
- RUSTFLAGS: -Ctarget-feature=-crt-static
- CARGO_HOME: ${GITHUB_WORKSPACE}/.cargo
- steps:
- - name: add armhf architecture
- run: dpkg --add-architecture armhf
- - name: install runtime
- run: apt update && apt install -y gcc-arm-linux-gnueabihf g++-arm-linux-gnueabihf libc6-armhf-cross libc6-dev-armhf-cross tar ca-certificates
- - name: add armhf target
- run: rustup target add armv7-unknown-linux-gnueabihf
- - name: Checkout repository
- uses: actions/checkout@v3.3.0
- - uses: actions/cache@v3
- with:
- path: |
- .cargo/bin
- .cargo/registry/index
- .cargo/registry/cache
- .cargo/git/db
- target
- key: lldap-bin-armhf-${{ hashFiles('**/Cargo.lock') }}
- restore-keys: |
- lldap-bin-armhf-
- - name: compile armhf
- run: cargo build --target=armv7-unknown-linux-gnueabihf --release -p lldap -p migration-tool
- - name: check path
- run: ls -al target/release
- - name: upload armhf lldap artifacts
- uses: actions/upload-artifact@v3
- with:
- name: armhf-lldap-bin
- path: target/armv7-unknown-linux-gnueabihf/release/lldap
- - name: upload armhfmigration-tool artifacts
- uses: actions/upload-artifact@v3
- with:
- name: armhf-migration-tool-bin
- path: target/armv7-unknown-linux-gnueabihf/release/migration-tool
-
-
- build-aarch64:
- runs-on: ubuntu-latest
- container:
- image: nitnelave/rust-dev:latest
- env:
CARGO_TARGET_AARCH64_UNKNOWN_LINUX_MUSL_LINKER: aarch64-linux-musl-gcc
- CARGO_TERM_COLOR: always
- RUSTFLAGS: -Ctarget-feature=+crt-static
- CARGO_HOME: ${GITHUB_WORKSPACE}/.cargo
- steps:
- - name: Checkout repository
- uses: actions/checkout@v3.3.0
- - uses: actions/cache@v3
- with:
- path: |
- .cargo/bin
- .cargo/registry/index
- .cargo/registry/cache
- .cargo/git/db
- target
- key: lldap-bin-aarch64-${{ hashFiles('**/Cargo.lock') }}
- restore-keys: |
- lldap-bin-aarch64-
-# - name: fetch musl-gcc
-# run: |
-# wget -c https://musl.cc/aarch64-linux-musl-cross.tgz
-# tar zxf ./x86_64-linux-musl-cross.tgz -C /opt
-# echo "/opt/aarch64-linux-musl-cross:/opt/aarch64-linux-musl-cross/bin" >> $GITHUB_PATH
- - name: add musl aarch64 target
- run: rustup target add aarch64-unknown-linux-musl
- - name: build lldap aarch4
- run: cargo build --target=aarch64-unknown-linux-musl --release -p lldap -p migration-tool
- - name: check path
- run: ls -al target/aarch64-unknown-linux-musl/release/
- - name: upload aarch64 lldap artifacts
- uses: actions/upload-artifact@v3
- with:
- name: aarch64-lldap-bin
- path: target/aarch64-unknown-linux-musl/release/lldap
- - name: upload aarch64 migration-tool artifacts
- uses: actions/upload-artifact@v3
- with:
- name: aarch64-migration-tool-bin
- path: target/aarch64-unknown-linux-musl/release/migration-tool
-
- build-amd64:
- runs-on: ubuntu-latest
- container:
- image: nitnelave/rust-dev:latest
- env:
- CARGO_TERM_COLOR: always
- RUSTFLAGS: -Ctarget-feature=+crt-static
- CARGO_HOME: ${GITHUB_WORKSPACE}/.cargo
CARGO_TARGET_X86_64_UNKNOWN_LINUX_MUSL_LINKER: x86_64-linux-musl-gcc
+ CARGO_TERM_COLOR: always
+ RUSTFLAGS: -Ctarget-feature=+crt-static
+ CARGO_HOME: ${GITHUB_WORKSPACE}/.cargo
steps:
- name: Checkout repository
uses: actions/checkout@v3.3.0
@@ -197,47 +122,103 @@ jobs:
.cargo/registry/cache
.cargo/git/db
target
- key: lldap-bin-amd64-${{ hashFiles('**/Cargo.lock') }}
+ key: lldap-bin-${{ matrix.target }}-${{ hashFiles('**/Cargo.lock') }}
restore-keys: |
- lldap-bin-amd64-
- - name: install musl
- run: apt update && apt install -y musl-tools tar wget
-# - name: fetch musl-gcc
-# run: |
-# wget -c https://musl.cc/x86_64-linux-musl-cross.tgz
-# tar zxf ./x86_64-linux-musl-cross.tgz -C /opt
-# echo "/opt/x86_64-linux-musl-cross:/opt/x86_64-linux-musl-cross/bin" >> $GITHUB_PATH
- - name: add x86_64 target
- run: rustup target add x86_64-unknown-linux-musl
- - name: build x86_64 lldap
- run: cargo build --target=x86_64-unknown-linux-musl --release -p lldap -p migration-tool
- - name: check path
- run: ls -al target/x86_64-unknown-linux-musl/release/
- - name: upload amd64 lldap artifacts
+ lldap-bin-${{ matrix.target }}-
+ - name: Compile ${{ matrix.target }} lldap and migration tool
+ run: cargo build --target=${{ matrix.target }} --release -p lldap -p migration-tool
+ - name: Check path
+        run: ls -al target/${{ matrix.target }}/release
+      - name: Upload ${{ matrix.target }} lldap artifacts
uses: actions/upload-artifact@v3
with:
- name: amd64-lldap-bin
- path: target/x86_64-unknown-linux-musl/release/lldap
- - name: upload amd64 migration-tool artifacts
+          name: ${{ matrix.target }}-lldap-bin
+ path: target/${{ matrix.target }}/release/lldap
+ - name: Upload ${{ matrix.target }} migration tool artifacts
uses: actions/upload-artifact@v3
with:
- name: amd64-migration-tool-bin
- path: target/x86_64-unknown-linux-musl/release/migration-tool
+ name: ${{ matrix.target }}-migration-tool-bin
+ path: target/${{ matrix.target }}/release/migration-tool
+
+ lldap-database-integration-test:
+ needs: [build-ui,build-bin]
+ name: LLDAP test
+ runs-on: ubuntu-latest
+ services:
+ mariadb:
+ image: mariadb:latest
+ ports:
+ - 3306:3306
+ env:
+ MYSQL_USER: lldapuser
+ MYSQL_PASSWORD: lldappass
+ MYSQL_DATABASE: lldap
+ MYSQL_ROOT_PASSWORD: rootpass
+
+ postgresql:
+ image: postgres:latest
+ ports:
+ - 5432:5432
+ env:
+ POSTGRES_USER: lldapuser
+ POSTGRES_PASSWORD: lldappass
+ POSTGRES_DB: lldap
+
+ steps:
+ - name: Download artifacts
+ uses: actions/download-artifact@v3
+ with:
+ name: x86_64-unknown-linux-musl-lldap-bin
+ path: bin/
+ - name: Where is the bin?
+ run: ls -alR bin
+ - name: Set executables to LLDAP
+ run: chmod +x bin/lldap
+
+ - name: Run lldap with postgres DB and healthcheck
+ run: |
+ bin/lldap run &
+ sleep 10s
+ bin/lldap healthcheck
+ env:
+ LLDAP_database_url: postgres://lldapuser:lldappass@localhost/lldap
+ LLDAP_ldap_port: 3890
+ LLDAP_http_port: 17170
+
+
+ - name: Run lldap with mariadb DB (MySQL Compatible) and healthcheck
+ run: |
+ bin/lldap run &
+ sleep 10s
+ bin/lldap healthcheck
+ env:
+ LLDAP_database_url: mysql://lldapuser:lldappass@localhost/lldap
+ LLDAP_ldap_port: 3891
+ LLDAP_http_port: 17171
+
+
+ - name: Run lldap with sqlite DB and healthcheck
+ run: |
+ bin/lldap run &
+ sleep 10s
+ bin/lldap healthcheck
+ env:
+ LLDAP_database_url: sqlite://users.db?mode=rwc
+ LLDAP_ldap_port: 3892
+ LLDAP_http_port: 17172
build-docker-image:
- needs: [build-ui,build-armhf,build-aarch64,build-amd64]
+ needs: [build-ui, build-bin]
name: Build Docker image
runs-on: ubuntu-latest
permissions:
contents: read
packages: write
steps:
- - name: install rsync
- run: sudo apt update && sudo apt install -y rsync
- - name: fetch repo
+ - name: Checkout repository
uses: actions/checkout@v3.3.0
- - name: Download All Artifacts
+ - name: Download all artifacts
uses: actions/download-artifact@v3
with:
path: bin
@@ -248,7 +229,7 @@ jobs:
name: ui
path: web
- - name: setup qemu
+ - name: Setup QEMU
uses: docker/setup-qemu-action@v2
- uses: docker/setup-buildx-action@v2
@@ -267,13 +248,6 @@ jobs:
type=semver,pattern={{major}}.{{minor}}
type=semver,pattern={{major}}
type=sha
- - name: Cache Docker layers
- uses: actions/cache@v3
- with:
- path: /tmp/.buildx-cache
- key: ${{ runner.os }}-buildx-${{ github.sha }}
- restore-keys: |
- ${{ runner.os }}-buildx-
- name: parse tag
uses: gacts/github-slug@v1
@@ -298,8 +272,8 @@ jobs:
platforms: linux/amd64,linux/arm64
file: ./.github/workflows/Dockerfile.ci.alpine
tags: nitnelave/lldap:latest, nitnelave/lldap:latest-alpine
- cache-from: type=local,src=/tmp/.buildx-cache
- cache-to: type=local,dest=/tmp/.buildx-cache-new
+ cache-from: type=gha,mode=max
+ cache-to: type=gha,mode=max
- name: Build and push latest debian
if: github.event_name != 'release'
@@ -310,8 +284,8 @@ jobs:
platforms: linux/amd64,linux/arm64,linux/arm/v7
file: ./.github/workflows/Dockerfile.ci.debian
tags: nitnelave/lldap:latest-debian
- cache-from: type=local,src=/tmp/.buildx-cache
- cache-to: type=local,dest=/tmp/.buildx-cache-new
+ cache-from: type=gha,mode=max
+ cache-to: type=gha,mode=max
########################################
#### docker image :semver tag build ####
@@ -326,8 +300,8 @@ jobs:
# Tag as latest, stable, semver, major, major.minor and major.minor.patch.
file: ./.github/workflows/Dockerfile.ci.alpine
tags: nitnelave/lldap:stable, nitnelave/lldap:stable-alpine, nitnelave/lldap:v${{ steps.slug.outputs.version-semantic }}, nitnelave/lldap:v${{ steps.slug.outputs.version-major }}, nitnelave/lldap:v${{ steps.slug.outputs.version-major }}.${{ steps.slug.outputs.version-minor }}, nitnelave/lldap:v${{ steps.slug.outputs.version-major }}.${{ steps.slug.outputs.version-minor }}.${{ steps.slug.outputs.version-patch }}, nitnelave/lldap:v${{ steps.slug.outputs.version-semantic }}-alpine, nitnelave/lldap:v${{ steps.slug.outputs.version-major }}-alpine, nitnelave/lldap:v${{ steps.slug.outputs.version-major }}-alpine.${{ steps.slug.outputs.version-minor }}-alpine, nitnelave/lldap:v${{ steps.slug.outputs.version-major }}.${{ steps.slug.outputs.version-minor }}.${{ steps.slug.outputs.version-patch }}-alpine
- cache-from: type=local,src=/tmp/.buildx-cache
- cache-to: type=local,dest=/tmp/.buildx-cache-new
+ cache-from: type=gha,mode=max
+ cache-to: type=gha,mode=max
- name: Build and push release debian
if: github.event_name == 'release'
@@ -339,11 +313,8 @@ jobs:
# Tag as latest, stable, semver, major, major.minor and major.minor.patch.
file: ./.github/workflows/Dockerfile.ci.debian
tags: nitnelave/lldap:stable-debian, nitnelave/lldap:v${{ steps.slug.outputs.version-semantic }}-debian, nitnelave/lldap:v${{ steps.slug.outputs.version-major }}-debian, nitnelave/lldap:v${{ steps.slug.outputs.version-major }}.${{ steps.slug.outputs.version-minor }}-debian, nitnelave/lldap:v${{ steps.slug.outputs.version-major }}.${{ steps.slug.outputs.version-minor }}.${{ steps.slug.outputs.version-patch }}-debian
- cache-from: type=local,src=/tmp/.buildx-cache
- cache-to: type=local,dest=/tmp/.buildx-cache-new
-
- - name: Move cache
- run: rsync -r /tmp/.buildx-cache-new /tmp/.buildx-cache --delete
+ cache-from: type=gha,mode=max
+ cache-to: type=gha,mode=max
- name: Update repo description
if: github.event_name != 'pull_request'
@@ -357,12 +328,12 @@ jobs:
### Download artifacts, clean up ui, upload to release page ###
###############################################################
create-release-artifacts:
- needs: [build-ui,build-armhf,build-aarch64,build-amd64]
+ needs: [build-ui, build-bin]
name: Create release artifacts
if: github.event_name == 'release'
runs-on: ubuntu-latest
steps:
- - name: Download All Artifacts
+ - name: Download all artifacts
uses: actions/download-artifact@v3
with:
path: bin/
@@ -370,12 +341,12 @@ jobs:
run: ls -alR bin/
- name: Fixing Filename
run: |
- mv bin/aarch64-lldap-bin/lldap bin/aarch64-lldap
- mv bin/amd64-lldap-bin/lldap bin/amd64-lldap
- mv bin/armhf-lldap-bin/lldap bin/armhf-lldap
- mv bin/aarch64-migration-tool-bin/migration-tool bin/aarch64-migration-tool
- mv bin/amd64-migration-tool-bin/migration-tool bin/amd64-migration-tool
- mv bin/armhf-migration-tool-bin/migration-tool bin/armhf-migration-tool
+ mv bin/aarch64-unknown-linux-musl-lldap-bin/lldap bin/aarch64-lldap
+ mv bin/x86_64-unknown-linux-musl-lldap-bin/lldap bin/amd64-lldap
+ mv bin/armv7-unknown-linux-gnueabihf-lldap-bin/lldap bin/armhf-lldap
+ mv bin/aarch64-unknown-linux-musl-migration-tool-bin/migration-tool bin/aarch64-migration-tool
+ mv bin/x86_64-unknown-linux-musl-migration-tool-bin/migration-tool bin/amd64-migration-tool
+ mv bin/armv7-unknown-linux-gnueabihf-migration-tool-bin/migration-tool bin/armhf-migration-tool
chmod +x bin/*-lldap
chmod +x bin/*-migration-tool
@@ -384,7 +355,7 @@ jobs:
with:
name: ui
path: web
- - name: Web Cleanup
+ - name: UI (web) artifacts cleanup
run: mkdir app && mv web/index.html app/index.html && mv web/static app/static && mv web/pkg app/pkg
- name: Fetch web components
run: |
@@ -412,14 +383,14 @@ jobs:
ls -alR amd64-lldap/
ls -alR armhf-lldap/
- - name: Compress
+ - name: Packing LLDAP and Web UI
run: |
tar -czvf aarch64-lldap.tar.gz aarch64-lldap/
tar -czvf amd64-lldap.tar.gz amd64-lldap/
tar -czvf armhf-lldap.tar.gz armhf-lldap/
- - name: Upload artifacts release
+ - name: Upload compressed release
uses: ncipollo/release-action@v1
id: create_release
with:
From 3fa100be0c1644dd182075f26a07eeda0558076a Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Tue, 24 Jan 2023 10:38:06 +0100
Subject: [PATCH 15/62] server: update sea-orm dependency
Fixes #405
---
Cargo.lock | 8 ++++----
server/Cargo.toml | 2 +-
2 files changed, 5 insertions(+), 5 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index e8ace93..aca26a0 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3428,9 +3428,9 @@ dependencies = [
[[package]]
name = "sea-orm"
-version = "0.10.3"
+version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8744afc95ca462de12c2cea5a56d7e406f3be2b2683d3b05066e1afdba898bc5"
+checksum = "88694d01b528a94f90ad87f8d2f546d060d070eee180315c67d158cb69476034"
dependencies = [
"async-stream",
"async-trait",
@@ -3453,9 +3453,9 @@ dependencies = [
[[package]]
name = "sea-orm-macros"
-version = "0.10.3"
+version = "0.10.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4ca4d01381fdcabc3818b6d39c5f1f0c885900af90da638e4001406907462784"
+checksum = "7216195de9c6b2474fd0efab486173dccd0eff21f28cc54aa4c0205d52fb3af0"
dependencies = [
"bae",
"heck 0.3.3",
diff --git a/server/Cargo.toml b/server/Cargo.toml
index 9c38656..fc35e9e 100644
--- a/server/Cargo.toml
+++ b/server/Cargo.toml
@@ -109,7 +109,7 @@ default-features = false
version = "0.24"
[dependencies.sea-orm]
-version= "0.10.3"
+version= ">=0.10.7"
default-features = false
features = ["macros", "with-chrono", "with-uuid", "sqlx-all", "runtime-actix-rustls"]
From d56de80381b24e95d9589a65d5bcd879ec0495e4 Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Tue, 24 Jan 2023 13:31:22 +0100
Subject: [PATCH 16/62] server: Update lettre
---
Cargo.lock | 4 ++--
server/Cargo.toml | 2 +-
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index aca26a0..8c975a8 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2211,9 +2211,9 @@ dependencies = [
[[package]]
name = "lettre"
-version = "0.10.0"
+version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "5677c78c7c7ede1dd68e8a7078012bc625449fb304e7b509b917eaaedfe6e849"
+checksum = "2eabca5e0b4d0e98e7f2243fb5b7520b6af2b65d8f87bcc86f2c75185a6ff243"
dependencies = [
"async-trait",
"base64",
diff --git a/server/Cargo.toml b/server/Cargo.toml
index fc35e9e..40f69dc 100644
--- a/server/Cargo.toml
+++ b/server/Cargo.toml
@@ -67,7 +67,7 @@ features = ["env-filter", "tracing-log"]
[dependencies.lettre]
features = ["builder", "serde", "smtp-transport", "tokio1-rustls-tls"]
default-features = false
-version = "0.10.0-rc.3"
+version = "0.10.1"
[dependencies.lldap_auth]
path = "../auth"
From 1e6a0edcfbb00ab39efd0f833f1570ba400e65bc Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Tue, 24 Jan 2023 13:40:31 +0000
Subject: [PATCH 17/62] build(deps): bump bumpalo from 3.10.0 to 3.12.0
Bumps [bumpalo](https://github.com/fitzgen/bumpalo) from 3.10.0 to 3.12.0.
- [Release notes](https://github.com/fitzgen/bumpalo/releases)
- [Changelog](https://github.com/fitzgen/bumpalo/blob/main/CHANGELOG.md)
- [Commits](https://github.com/fitzgen/bumpalo/compare/3.10.0...3.12.0)
---
updated-dependencies:
- dependency-name: bumpalo
dependency-type: indirect
...
Signed-off-by: dependabot[bot]
---
Cargo.lock | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 8c975a8..1e0067b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -649,9 +649,9 @@ dependencies = [
[[package]]
name = "bumpalo"
-version = "3.10.0"
+version = "3.12.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "37ccbd214614c6783386c1af30caf03192f17891059cecc394b4fb119e363de3"
+checksum = "0d261e256854913907f67ed06efbc3338dfe6179796deefc1ff763fc1aee5535"
[[package]]
name = "bytemuck"
From c3d18dbbe8f5bcc6091fd96d42fbf4278f070f3c Mon Sep 17 00:00:00 2001
From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com>
Date: Mon, 30 Jan 2023 20:07:23 +0000
Subject: [PATCH 18/62] build(deps): bump docker/build-push-action from 3 to 4
Bumps [docker/build-push-action](https://github.com/docker/build-push-action) from 3 to 4.
- [Release notes](https://github.com/docker/build-push-action/releases)
- [Commits](https://github.com/docker/build-push-action/compare/v3...v4)
---
updated-dependencies:
- dependency-name: docker/build-push-action
dependency-type: direct:production
update-type: version-update:semver-major
...
Signed-off-by: dependabot[bot]
---
.github/workflows/docker-build-static.yml | 8 ++++----
1 file changed, 4 insertions(+), 4 deletions(-)
diff --git a/.github/workflows/docker-build-static.yml b/.github/workflows/docker-build-static.yml
index 620e8c5..c7a4604 100644
--- a/.github/workflows/docker-build-static.yml
+++ b/.github/workflows/docker-build-static.yml
@@ -265,7 +265,7 @@ jobs:
########################################
- name: Build and push latest alpine
if: github.event_name != 'release'
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
@@ -277,7 +277,7 @@ jobs:
- name: Build and push latest debian
if: github.event_name != 'release'
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
with:
context: .
push: ${{ github.event_name != 'pull_request' }}
@@ -292,7 +292,7 @@ jobs:
########################################
- name: Build and push release alpine
if: github.event_name == 'release'
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
with:
context: .
platforms: linux/amd64,linux/arm64
@@ -305,7 +305,7 @@ jobs:
- name: Build and push release debian
if: github.event_name == 'release'
- uses: docker/build-push-action@v3
+ uses: docker/build-push-action@v4
with:
context: .
platforms: linux/amd64,linux/arm64,linux/arm/v7
From 58b9c28a0bf8bd4c2c6e7061670c0b180ead834f Mon Sep 17 00:00:00 2001
From: Diptesh Choudhuri
Date: Wed, 1 Feb 2023 17:32:52 +0530
Subject: [PATCH 19/62] example_configs: Add Dex example
Fixes #428.
---
README.md | 158 +++++++++++++++++----------------
example_configs/dex_config.yml | 32 +++++++
2 files changed, 113 insertions(+), 77 deletions(-)
create mode 100644 example_configs/dex_config.yml
diff --git a/README.md b/README.md
index 89c1279..ecf4c71 100644
--- a/README.md
+++ b/README.md
@@ -28,20 +28,20 @@
- - [About](#About)
- - [Installation](#Installation)
- - [With Docker](#With-Docker)
- - [From source](#From-source)
- - [Cross-compilation](#Cross-compilation)
- - [Client configuration](#Client-configuration)
- - [Compatible services](#compatible-services)
- - [General configuration guide](#general-configuration-guide)
- - [Sample client configurations](#Sample-client-configurations)
- - [Comparisons with other services](#Comparisons-with-other-services)
- - [vs OpenLDAP](#vs-openldap)
- - [vs FreeIPA](#vs-freeipa)
- - [I can't log in!](#i-cant-log-in)
- - [Contributions](#Contributions)
+- [About](#about)
+- [Installation](#installation)
+ - [With Docker](#with-docker)
+ - [From source](#from-source)
+ - [Cross-compilation](#cross-compilation)
+- [Client configuration](#client-configuration)
+ - [Compatible services](#compatible-services)
+ - [General configuration guide](#general-configuration-guide)
+ - [Sample client configurations](#sample-client-configurations)
+- [Comparisons with other services](#comparisons-with-other-services)
+ - [vs OpenLDAP](#vs-openldap)
+ - [vs FreeIPA](#vs-freeipa)
+- [I can't log in!](#i-cant-log-in)
+- [Contributions](#contributions)
## About
@@ -62,10 +62,11 @@ edit their own details or reset their password by email.
The goal is _not_ to provide a full LDAP server; if you're interested in that,
check out OpenLDAP. This server is a user management system that is:
-* simple to setup (no messing around with `slapd`),
-* simple to manage (friendly web UI),
-* low resources,
-* opinionated with basic defaults so you don't have to understand the
+
+- simple to setup (no messing around with `slapd`),
+- simple to manage (friendly web UI),
+- low resources,
+- opinionated with basic defaults so you don't have to understand the
subtleties of LDAP.
It mostly targets self-hosting servers, with open-source components like
@@ -98,14 +99,14 @@ contents are loaded into the respective configuration parameters. Note that
`_FILE` variables take precedence.
Example for docker compose:
-* You can use either the `:latest` tag image or `:stable` as used in this example.
-* `:latest` tag image contains recently pushed code or feature tests, in which some instability can be expected.
-* If `UID` and `GID` no defined LLDAP will use default `UID` and `GID` number `1000`.
-* If no `TZ` is set, default `UTC` timezone will be used.
+- You can use either the `:latest` tag image or `:stable` as used in this example.
+- `:latest` tag image contains recently pushed code or feature tests, in which some instability can be expected.
+- If `UID` and `GID` are not defined, LLDAP will use the default `UID` and `GID` number `1000`.
+- If no `TZ` is set, default `UTC` timezone will be used.
```yaml
-version: '3'
+version: "3"
volumes:
lldap_data:
@@ -139,9 +140,9 @@ front-end.
To compile the project, you'll need:
-* nodejs 16: [nodesource nodejs installation guide](https://github.com/nodesource/distributions)
-* curl: `sudo apt install curl`
-* Rust/Cargo: [rustup.rs](https://rustup.rs/)
+- nodejs 16: [nodesource nodejs installation guide](https://github.com/nodesource/distributions)
+- curl: `sudo apt install curl`
+- Rust/Cargo: [rustup.rs](https://rustup.rs/)
Then you can compile the server (and the migration tool if you want):
@@ -155,8 +156,8 @@ just run `cargo run -- run` to run the server.
To bring up the server, you'll need to compile the frontend. In addition to
cargo, you'll need:
-* WASM-pack: `cargo install wasm-pack`
-* rollup.js: `npm install rollup`
+- WASM-pack: `cargo install wasm-pack`
+- rollup.js: `npm install rollup`
Then you can build the frontend files with `./app/build.sh` (you'll need to run
this after every front-end change to update the WASM package served).
@@ -204,14 +205,15 @@ the config).
### General configuration guide
To configure the services that will talk to LLDAP, here are the values:
- - The LDAP user DN is from the configuration. By default,
- `cn=admin,ou=people,dc=example,dc=com`.
- - The LDAP password is from the configuration (same as to log in to the web
- UI).
- - The users are all located in `ou=people,` + the base DN, so by default user
- `bob` is at `cn=bob,ou=people,dc=example,dc=com`.
- - Similarly, the groups are located in `ou=groups`, so the group `family`
- will be at `cn=family,ou=groups,dc=example,dc=com`.
+
+- The LDAP user DN is from the configuration. By default,
+ `cn=admin,ou=people,dc=example,dc=com`.
+- The LDAP password is from the configuration (same as to log in to the web
+ UI).
+- The users are all located in `ou=people,` + the base DN, so by default user
+ `bob` is at `cn=bob,ou=people,dc=example,dc=com`.
+- Similarly, the groups are located in `ou=groups`, so the group `family`
+ will be at `cn=family,ou=groups,dc=example,dc=com`.
Testing group membership through `memberOf` is supported, so you can have a
filter like: `(memberOf=cn=admins,ou=groups,dc=example,dc=com)`.
@@ -226,33 +228,35 @@ administration access to many services.
Some specific clients have been tested to work and come with sample
configuration files, or guides. See the [`example_configs`](example_configs)
folder for help with:
- - [Airsonic Advanced](example_configs/airsonic-advanced.md)
- - [Apache Guacamole](example_configs/apacheguacamole.md)
- - [Authelia](example_configs/authelia_config.yml)
- - [Bookstack](example_configs/bookstack.env.example)
- - [Calibre-Web](example_configs/calibre_web.md)
- - [Dell iDRAC](example_configs/dell_idrac.md)
- - [Dokuwiki](example_configs/dokuwiki.md)
- - [Dolibarr](example_configs/dolibarr.md)
- - [Emby](example_configs/emby.md)
- - [Gitea](example_configs/gitea.md)
- - [Grafana](example_configs/grafana_ldap_config.toml)
- - [Hedgedoc](example_configs/hedgedoc.md)
- - [Jellyfin](example_configs/jellyfin.md)
- - [Jitsi Meet](example_configs/jitsi_meet.conf)
- - [KeyCloak](example_configs/keycloak.md)
- - [Matrix](example_configs/matrix_synapse.yml)
- - [Nextcloud](example_configs/nextcloud.md)
- - [Organizr](example_configs/Organizr.md)
- - [Portainer](example_configs/portainer.md)
- - [Seafile](example_configs/seafile.md)
- - [Syncthing](example_configs/syncthing.md)
- - [Vaultwarden](example_configs/vaultwarden.md)
- - [WeKan](example_configs/wekan.md)
- - [WG Portal](example_configs/wg_portal.env.example)
- - [WikiJS](example_configs/wikijs.md)
- - [XBackBone](example_configs/xbackbone_config.php)
- - [Zendto](example_configs/zendto.md)
+
+- [Airsonic Advanced](example_configs/airsonic-advanced.md)
+- [Apache Guacamole](example_configs/apacheguacamole.md)
+- [Authelia](example_configs/authelia_config.yml)
+- [Bookstack](example_configs/bookstack.env.example)
+- [Calibre-Web](example_configs/calibre_web.md)
+- [Dell iDRAC](example_configs/dell_idrac.md)
+- [Dex](example_configs/dex_config.yml)
+- [Dokuwiki](example_configs/dokuwiki.md)
+- [Dolibarr](example_configs/dolibarr.md)
+- [Emby](example_configs/emby.md)
+- [Gitea](example_configs/gitea.md)
+- [Grafana](example_configs/grafana_ldap_config.toml)
+- [Hedgedoc](example_configs/hedgedoc.md)
+- [Jellyfin](example_configs/jellyfin.md)
+- [Jitsi Meet](example_configs/jitsi_meet.conf)
+- [KeyCloak](example_configs/keycloak.md)
+- [Matrix](example_configs/matrix_synapse.yml)
+- [Nextcloud](example_configs/nextcloud.md)
+- [Organizr](example_configs/Organizr.md)
+- [Portainer](example_configs/portainer.md)
+- [Seafile](example_configs/seafile.md)
+- [Syncthing](example_configs/syncthing.md)
+- [Vaultwarden](example_configs/vaultwarden.md)
+- [WeKan](example_configs/wekan.md)
+- [WG Portal](example_configs/wg_portal.env.example)
+- [WikiJS](example_configs/wikijs.md)
+- [XBackBone](example_configs/xbackbone_config.php)
+- [Zendto](example_configs/zendto.md)
## Comparisons with other services
@@ -291,20 +295,20 @@ use. It also comes conveniently packed in a docker container.
If you just set up the server, can get to the login page but the password you
set isn't working, try the following:
- - (For docker): Make sure that the `/data` folder is persistent, either to a
- docker volume or mounted from the host filesystem.
- - Check if there is a `lldap_config.toml` file (either in `/data` for docker
- or in the current directory). If there isn't, copy
- `lldap_config.docker_template.toml` there, and fill in the various values
- (passwords, secrets, ...).
- - Check if there is a `users.db` file (either in `/data` for docker or where
- you specified the DB URL, which defaults to the current directory). If
- there isn't, check that the user running the command (user with ID 10001
- for docker) has the rights to write to the `/data` folder. If in doubt, you
- can `chmod 777 /data` (or whatever the folder) to make it world-writeable.
- - Make sure you restart the server.
- - If it's still not working, join the
- [Discord server](https://discord.gg/h5PEdRMNyP) to ask for help.
+- (For docker): Make sure that the `/data` folder is persistent, either to a
+ docker volume or mounted from the host filesystem.
+- Check if there is a `lldap_config.toml` file (either in `/data` for docker
+ or in the current directory). If there isn't, copy
+ `lldap_config.docker_template.toml` there, and fill in the various values
+ (passwords, secrets, ...).
+- Check if there is a `users.db` file (either in `/data` for docker or where
+ you specified the DB URL, which defaults to the current directory). If
+ there isn't, check that the user running the command (user with ID 10001
+ for docker) has the rights to write to the `/data` folder. If in doubt, you
+ can `chmod 777 /data` (or whatever the folder) to make it world-writeable.
+- Make sure you restart the server.
+- If it's still not working, join the
+ [Discord server](https://discord.gg/h5PEdRMNyP) to ask for help.
## Contributions
diff --git a/example_configs/dex_config.yml b/example_configs/dex_config.yml
new file mode 100644
index 0000000..0c566ec
--- /dev/null
+++ b/example_configs/dex_config.yml
@@ -0,0 +1,32 @@
+# lldap configuration:
+# LLDAP_LDAP_BASE_DN: dc=example,dc=com
+
+# ##############################
+# rest of the Dex options
+# ##############################
+
+connectors:
+ - type: ldap
+ id: ldap
+ name: LDAP
+ config:
+ host: lldap-host # make sure it does not start with `ldap://`
+ port: 3890 # or 6360 if you have ldaps enabled
+ insecureNoSSL: true # or false if you have ldaps enabled
+ insecureSkipVerify: true # or false if you have ldaps enabled
+ bindDN: uid=admin,ou=people,dc=example,dc=com # replace admin with your admin user
+ bindPW: very-secure-password # replace with your admin password
+ userSearch:
+ baseDN: ou=people,dc=example,dc=com
+ username: uid
+ idAttr: uid
+ emailAttr: mail
+ nameAttr: displayName
+ preferredUsernameAttr: uid
+ groupSearch:
+ baseDN: ou=groups,dc=example,dc=com
+ filter: "(objectClass=groupOfUniqueNames)"
+ userMatchers:
+ - userAttr: uid
+ groupAttr: member
+ nameAttr: displayName
From 648848c816dd9dd4fa27567bb223e1baf1d22b62 Mon Sep 17 00:00:00 2001
From: Rex Zhang
Date: Wed, 8 Feb 2023 17:30:23 +0800
Subject: [PATCH 20/62] example_configs: Add note for Gitea's simple auth mode
---
example_configs/gitea.md | 6 ++++++
1 file changed, 6 insertions(+)
diff --git a/example_configs/gitea.md b/example_configs/gitea.md
index 654d76e..f14fff6 100644
--- a/example_configs/gitea.md
+++ b/example_configs/gitea.md
@@ -41,3 +41,9 @@ Check `Remove Users from syncronised teams...`
The `Map LDAP groups to Organization teams` config is JSON formatted and can be extended to as many groups as needed.
Replace every instance of `dc=example,dc=com` with your configured domain.
+
+# Configuration for Gitea in `simple auth` mode
+
+* The configuration method is the same as in `BindDN` mode.
+* `BindDN` and `password` are not required.
+* Gitea will not be able to pre-sync users; user accounts will be created at login time.
From 8f2c5b397cfc695e1fd739bca31227d7dd5505c2 Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Fri, 10 Feb 2023 11:07:14 +0100
Subject: [PATCH 21/62] server: allow NULL for display_name
Fixes #387.
---
server/src/domain/sql_migrations.rs | 86 +++++++++++++++++++++++++++--
server/src/domain/sql_tables.rs | 45 ++++++++++-----
2 files changed, 114 insertions(+), 17 deletions(-)
diff --git a/server/src/domain/sql_migrations.rs b/server/src/domain/sql_migrations.rs
index 7efb152..e4ed092 100644
--- a/server/src/domain/sql_migrations.rs
+++ b/server/src/domain/sql_migrations.rs
@@ -2,10 +2,12 @@ use crate::domain::{
sql_tables::{DbConnection, SchemaVersion},
types::{GroupId, UserId, Uuid},
};
-use sea_orm::{ConnectionTrait, FromQueryResult, Statement};
+use sea_orm::{ConnectionTrait, FromQueryResult, Statement, TransactionTrait};
use sea_query::{ColumnDef, Expr, ForeignKey, ForeignKeyAction, Iden, Query, Table, Value};
use serde::{Deserialize, Serialize};
-use tracing::{instrument, warn};
+use tracing::{info, instrument, warn};
+
+use super::sql_tables::LAST_SCHEMA_VERSION;
#[derive(Iden, PartialEq, Eq, Debug, Serialize, Deserialize, Clone)]
pub enum Users {
@@ -331,11 +333,87 @@ pub async fn upgrade_to_v1(pool: &DbConnection) -> std::result::Result<(), sea_o
}
pub async fn migrate_from_version(
- _pool: &DbConnection,
+ pool: &DbConnection,
version: SchemaVersion,
) -> anyhow::Result<()> {
- if version.0 > 1 {
+ if version > LAST_SCHEMA_VERSION {
anyhow::bail!("DB version downgrading is not supported");
+ } else if version == LAST_SCHEMA_VERSION {
+ return Ok(());
}
+ info!(
+ "Upgrading DB schema from {} to {}",
+ version.0, LAST_SCHEMA_VERSION.0
+ );
+ let builder = pool.get_database_backend();
+ if version < SchemaVersion(2) {
+ // Drop the not_null constraint on display_name. Due to Sqlite, this is more complicated:
+ // - rename the display_name column to a temporary name
+ // - create the display_name column without the constraint
+ // - copy the data from the temp column to the new one
+ // - update the new one to replace empty strings with null
+ // - drop the old one
+ pool.transaction::<_, (), sea_orm::DbErr>(|transaction| {
+ Box::pin(async move {
+ #[derive(Iden)]
+ enum TempUsers {
+ TempDisplayName,
+ }
+ transaction
+ .execute(
+ builder.build(
+ Table::alter()
+ .table(Users::Table)
+ .rename_column(Users::DisplayName, TempUsers::TempDisplayName),
+ ),
+ )
+ .await?;
+ transaction
+ .execute(
+ builder.build(
+ Table::alter()
+ .table(Users::Table)
+ .add_column(ColumnDef::new(Users::DisplayName).string_len(255)),
+ ),
+ )
+ .await?;
+ transaction
+ .execute(builder.build(Query::update().table(Users::Table).value(
+ Users::DisplayName,
+ Expr::col((Users::Table, TempUsers::TempDisplayName)),
+ )))
+ .await?;
+ transaction
+ .execute(
+ builder.build(
+ Query::update()
+ .table(Users::Table)
+                                .value(Users::DisplayName, Option::<String>::None)
+ .cond_where(Expr::col(Users::DisplayName).eq("")),
+ ),
+ )
+ .await?;
+ transaction
+ .execute(
+ builder.build(
+ Table::alter()
+ .table(Users::Table)
+ .drop_column(TempUsers::TempDisplayName),
+ ),
+ )
+ .await?;
+ Ok(())
+ })
+ })
+ .await?;
+ }
+ pool.execute(
+ builder.build(
+ Query::update()
+ .table(Metadata::Table)
+ .value(Metadata::Version, Value::from(LAST_SCHEMA_VERSION)),
+ ),
+ )
+ .await?;
Ok(())
}
diff --git a/server/src/domain/sql_tables.rs b/server/src/domain/sql_tables.rs
index 0f202b0..0a81363 100644
--- a/server/src/domain/sql_tables.rs
+++ b/server/src/domain/sql_tables.rs
@@ -3,7 +3,7 @@ use sea_orm::Value;
pub type DbConnection = sea_orm::DatabaseConnection;
-#[derive(Copy, PartialEq, Eq, Debug, Clone)]
+#[derive(Copy, PartialEq, Eq, Debug, Clone, PartialOrd, Ord)]
pub struct SchemaVersion(pub i16);
impl sea_orm::TryGetable for SchemaVersion {
@@ -22,6 +22,8 @@ impl From<SchemaVersion> for Value {
}
}
+pub const LAST_SCHEMA_VERSION: SchemaVersion = SchemaVersion(2);
+
pub async fn init_table(pool: &DbConnection) -> anyhow::Result<()> {
let version = {
if let Some(version) = get_schema_version(pool).await {
@@ -99,14 +101,21 @@ mod tests {
let sql_pool = get_in_memory_db().await;
sql_pool
.execute(raw_statement(
- r#"CREATE TABLE users ( user_id TEXT , creation_date TEXT);"#,
+ r#"CREATE TABLE users ( user_id TEXT, display_name TEXT, creation_date TEXT);"#,
))
.await
.unwrap();
sql_pool
.execute(raw_statement(
- r#"INSERT INTO users (user_id, creation_date)
- VALUES ("bôb", "1970-01-01 00:00:00")"#,
+ r#"INSERT INTO users (user_id, display_name, creation_date)
+ VALUES ("bôb", "", "1970-01-01 00:00:00")"#,
+ ))
+ .await
+ .unwrap();
+ sql_pool
+ .execute(raw_statement(
+ r#"INSERT INTO users (user_id, display_name, creation_date)
+ VALUES ("john", "John Doe", "1971-01-01 00:00:00")"#,
))
.await
.unwrap();
@@ -132,17 +141,27 @@ mod tests {
.await
.unwrap();
#[derive(FromQueryResult, PartialEq, Eq, Debug)]
- struct JustUuid {
+ struct SimpleUser {
+        display_name: Option<String>,
uuid: Uuid,
}
assert_eq!(
- JustUuid::find_by_statement(raw_statement(r#"SELECT uuid FROM users"#))
- .all(&sql_pool)
- .await
- .unwrap(),
- vec![JustUuid {
- uuid: crate::uuid!("a02eaf13-48a7-30f6-a3d4-040ff7c52b04")
- }]
+ SimpleUser::find_by_statement(raw_statement(
+ r#"SELECT display_name, uuid FROM users ORDER BY display_name"#
+ ))
+ .all(&sql_pool)
+ .await
+ .unwrap(),
+ vec![
+ SimpleUser {
+ display_name: None,
+ uuid: crate::uuid!("a02eaf13-48a7-30f6-a3d4-040ff7c52b04")
+ },
+ SimpleUser {
+ display_name: Some("John Doe".to_owned()),
+ uuid: crate::uuid!("986765a5-3f03-389e-b47b-536b2d6e1bec")
+ }
+ ]
);
#[derive(FromQueryResult, PartialEq, Eq, Debug)]
struct ShortGroupDetails {
@@ -180,7 +199,7 @@ mod tests {
.unwrap()
.unwrap(),
sql_migrations::JustSchemaVersion {
- version: SchemaVersion(1)
+ version: LAST_SCHEMA_VERSION
}
);
}
From 96eb17a9632fef1326082306a5b6e70886e13ea3 Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Fri, 10 Feb 2023 11:37:36 +0100
Subject: [PATCH 22/62] server: fix clippy warning
The clippy::uninlined_format_args warning in 1.67 was downgraded to
pedantic in 1.67.1 due to lack of support in rust-analyzer, so we're not
updating that one yet.
---
app/src/lib.rs | 3 ++-
migration-tool/src/main.rs | 2 ++
server/src/domain/sql_migrations.rs | 15 +++++++--------
server/src/main.rs | 3 ++-
4 files changed, 13 insertions(+), 10 deletions(-)
diff --git a/app/src/lib.rs b/app/src/lib.rs
index 3937693..c079976 100644
--- a/app/src/lib.rs
+++ b/app/src/lib.rs
@@ -1,6 +1,7 @@
#![recursion_limit = "256"]
#![forbid(non_ascii_idents)]
-#![allow(clippy::nonstandard_macro_braces)]
+#![allow(clippy::uninlined_format_args)]
+
pub mod components;
pub mod infra;
diff --git a/migration-tool/src/main.rs b/migration-tool/src/main.rs
index 7685a9c..cf1358e 100644
--- a/migration-tool/src/main.rs
+++ b/migration-tool/src/main.rs
@@ -1,3 +1,5 @@
+#![allow(clippy::uninlined_format_args)]
+
use std::collections::HashSet;
use anyhow::{anyhow, Result};
diff --git a/server/src/domain/sql_migrations.rs b/server/src/domain/sql_migrations.rs
index e4ed092..deb82c7 100644
--- a/server/src/domain/sql_migrations.rs
+++ b/server/src/domain/sql_migrations.rs
@@ -336,15 +336,14 @@ pub async fn migrate_from_version(
pool: &DbConnection,
version: SchemaVersion,
) -> anyhow::Result<()> {
- if version > LAST_SCHEMA_VERSION {
- anyhow::bail!("DB version downgrading is not supported");
- } else if version == LAST_SCHEMA_VERSION {
- return Ok(());
+ match version.cmp(&LAST_SCHEMA_VERSION) {
+ std::cmp::Ordering::Less => info!(
+ "Upgrading DB schema from {} to {}",
+ version.0, LAST_SCHEMA_VERSION.0
+ ),
+ std::cmp::Ordering::Equal => return Ok(()),
+ std::cmp::Ordering::Greater => anyhow::bail!("DB version downgrading is not supported"),
}
- info!(
- "Upgrading DB schema from {} to {}",
- version.0, LAST_SCHEMA_VERSION.0
- );
let builder = pool.get_database_backend();
if version < SchemaVersion(2) {
// Drop the not_null constraint on display_name. Due to Sqlite, this is more complicated:
diff --git a/server/src/main.rs b/server/src/main.rs
index 2c67bd9..712d0a9 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -1,6 +1,7 @@
#![forbid(unsafe_code)]
#![forbid(non_ascii_idents)]
-#![allow(clippy::nonstandard_macro_braces)]
+// TODO: Remove next line once ubuntu upgrades rustc to >=1.67.1
+#![allow(clippy::uninlined_format_args)]
use std::time::Duration;
From 63cbf30dd7c3975e88dc03f27c99909b3b8bf3dd Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Fri, 10 Feb 2023 12:32:41 +0100
Subject: [PATCH 23/62] server: upgrade sea-orm to 0.11
---
Cargo.lock | 43 +++++--------------
server/Cargo.toml | 6 +--
server/src/domain/model/users.rs | 6 +--
.../src/domain/sql_group_backend_handler.rs | 2 +-
server/src/domain/sql_migrations.rs | 6 ++-
server/src/domain/sql_tables.rs | 7 ++-
server/src/domain/sql_user_backend_handler.rs | 3 +-
server/src/domain/types.rs | 21 +++++----
server/src/infra/jwt_sql_tables.rs | 6 ++-
server/src/infra/sql_backend_handler.rs | 6 +--
10 files changed, 43 insertions(+), 63 deletions(-)
diff --git a/Cargo.lock b/Cargo.lock
index 1e0067b..56eb8f9 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -855,21 +855,6 @@ dependencies = [
"libc",
]
-[[package]]
-name = "crc"
-version = "3.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "53757d12b596c16c78b83458d732a5d1a17ab3f53f2f7412f6fb57cc8a140ab3"
-dependencies = [
- "crc-catalog",
-]
-
-[[package]]
-name = "crc-catalog"
-version = "2.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2d0165d2900ae6778e36e80bbc4da3b5eefccee9ba939761f9c2882a5d9af3ff"
-
[[package]]
name = "crc32fast"
version = "1.3.2"
@@ -2332,7 +2317,6 @@ dependencies = [
"rustls 0.20.6",
"rustls-pemfile",
"sea-orm",
- "sea-query",
"secstr",
"serde",
"serde_bytes",
@@ -3428,15 +3412,14 @@ dependencies = [
[[package]]
name = "sea-orm"
-version = "0.10.7"
+version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "88694d01b528a94f90ad87f8d2f546d060d070eee180315c67d158cb69476034"
+checksum = "e7a0e3ec90718d849c73b167df7a476672b64c7ee5f3c582179069e63b2451e1"
dependencies = [
"async-stream",
"async-trait",
"chrono",
"futures",
- "futures-util",
"log",
"ouroboros",
"sea-orm-macros",
@@ -3453,9 +3436,9 @@ dependencies = [
[[package]]
name = "sea-orm-macros"
-version = "0.10.7"
+version = "0.11.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7216195de9c6b2474fd0efab486173dccd0eff21f28cc54aa4c0205d52fb3af0"
+checksum = "5d89f7d4d2533c178e08a9e1990619c391e9ca7b402851d02a605938b15e03d9"
dependencies = [
"bae",
"heck 0.3.3",
@@ -3466,9 +3449,9 @@ dependencies = [
[[package]]
name = "sea-query"
-version = "0.27.2"
+version = "0.28.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a4f0fc4d8e44e1d51c739a68d336252a18bc59553778075d5e32649be6ec92ed"
+checksum = "d2fbe015dbdaa7d8829d71c1e14fb6289e928ac256b93dfda543c85cd89d6f03"
dependencies = [
"chrono",
"sea-query-derive",
@@ -3477,9 +3460,9 @@ dependencies = [
[[package]]
name = "sea-query-binder"
-version = "0.2.2"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9c2585b89c985cfacfe0ec9fc9e7bb055b776c1a2581c4e3c6185af2b8bf8865"
+checksum = "03548c63aec07afd4fd190923e0160d2f2fc92def27470b54154cf232da6203b"
dependencies = [
"chrono",
"sea-query",
@@ -3489,11 +3472,11 @@ dependencies = [
[[package]]
name = "sea-query-derive"
-version = "0.2.0"
+version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "34cdc022b4f606353fe5dc85b09713a04e433323b70163e81513b141c6ae6eb5"
+checksum = "63f62030c60f3a691f5fe251713b4e220b306e50a71e1d6f9cce1f24bb781978"
dependencies = [
- "heck 0.3.3",
+ "heck 0.4.0",
"proc-macro2",
"quote",
"syn",
@@ -3827,7 +3810,6 @@ dependencies = [
"byteorder",
"bytes",
"chrono",
- "crc",
"crossbeam-queue",
"digest 0.10.6",
"dirs",
@@ -3888,7 +3870,6 @@ dependencies = [
"once_cell",
"proc-macro2",
"quote",
- "sha2 0.10.6",
"sqlx-core",
"sqlx-rt",
"syn",
@@ -4508,9 +4489,7 @@ version = "1.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "422ee0de9031b5b948b97a8fc04e3aa35230001a722ddd27943e0be31564ce4c"
dependencies = [
- "getrandom 0.2.7",
"md-5",
- "serde",
]
[[package]]
diff --git a/server/Cargo.toml b/server/Cargo.toml
index 40f69dc..04d212f 100644
--- a/server/Cargo.toml
+++ b/server/Cargo.toml
@@ -72,10 +72,6 @@ version = "0.10.1"
[dependencies.lldap_auth]
path = "../auth"
-[dependencies.sea-query]
-version = "*"
-features = ["with-chrono"]
-
[dependencies.opaque-ke]
version = "0.6"
@@ -109,7 +105,7 @@ default-features = false
version = "0.24"
[dependencies.sea-orm]
-version= ">=0.10.7"
+version= "0.11"
default-features = false
features = ["macros", "with-chrono", "with-uuid", "sqlx-all", "runtime-actix-rustls"]
diff --git a/server/src/domain/model/users.rs b/server/src/domain/model/users.rs
index 32f8d86..84b583a 100644
--- a/server/src/domain/model/users.rs
+++ b/server/src/domain/model/users.rs
@@ -1,6 +1,6 @@
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.10.3
-use sea_orm::entity::prelude::*;
+use sea_orm::{entity::prelude::*, sea_query::BlobSize};
use serde::{Deserialize, Serialize};
use crate::domain::types::{JpegPhoto, UserId, Uuid};
@@ -56,9 +56,9 @@ impl ColumnTrait for Column {
Column::DisplayName => ColumnType::String(Some(255)),
Column::FirstName => ColumnType::String(Some(255)),
Column::LastName => ColumnType::String(Some(255)),
- Column::Avatar => ColumnType::Binary,
+ Column::Avatar => ColumnType::Binary(BlobSize::Long),
Column::CreationDate => ColumnType::DateTime,
- Column::PasswordHash => ColumnType::Binary,
+ Column::PasswordHash => ColumnType::Binary(BlobSize::Medium),
Column::TotpSecret => ColumnType::String(Some(64)),
Column::MfaType => ColumnType::String(Some(64)),
Column::Uuid => ColumnType::String(Some(36)),
diff --git a/server/src/domain/sql_group_backend_handler.rs b/server/src/domain/sql_group_backend_handler.rs
index 5367090..e5677a4 100644
--- a/server/src/domain/sql_group_backend_handler.rs
+++ b/server/src/domain/sql_group_backend_handler.rs
@@ -7,10 +7,10 @@ use crate::domain::{
};
use async_trait::async_trait;
use sea_orm::{
+ sea_query::{Cond, IntoCondition, SimpleExpr},
ActiveModelTrait, ActiveValue, ColumnTrait, EntityTrait, QueryFilter, QueryOrder, QuerySelect,
QueryTrait,
};
-use sea_query::{Cond, IntoCondition, SimpleExpr};
use tracing::{debug, instrument};
fn get_group_filter_expr(filter: GroupRequestFilter) -> Cond {
diff --git a/server/src/domain/sql_migrations.rs b/server/src/domain/sql_migrations.rs
index deb82c7..7be7b7f 100644
--- a/server/src/domain/sql_migrations.rs
+++ b/server/src/domain/sql_migrations.rs
@@ -2,8 +2,10 @@ use crate::domain::{
sql_tables::{DbConnection, SchemaVersion},
types::{GroupId, UserId, Uuid},
};
-use sea_orm::{ConnectionTrait, FromQueryResult, Statement, TransactionTrait};
-use sea_query::{ColumnDef, Expr, ForeignKey, ForeignKeyAction, Iden, Query, Table, Value};
+use sea_orm::{
+ sea_query::{self, ColumnDef, Expr, ForeignKey, ForeignKeyAction, Query, Table, Value},
+ ConnectionTrait, FromQueryResult, Iden, Statement, TransactionTrait,
+};
use serde::{Deserialize, Serialize};
use tracing::{info, instrument, warn};
diff --git a/server/src/domain/sql_tables.rs b/server/src/domain/sql_tables.rs
index 0a81363..b61fd93 100644
--- a/server/src/domain/sql_tables.rs
+++ b/server/src/domain/sql_tables.rs
@@ -7,12 +7,11 @@ pub type DbConnection = sea_orm::DatabaseConnection;
pub struct SchemaVersion(pub i16);
impl sea_orm::TryGetable for SchemaVersion {
-    fn try_get(
+    fn try_get_by<I: sea_orm::ColIdx>(
         res: &sea_orm::QueryResult,
-        pre: &str,
-        col: &str,
+        index: I,
     ) -> Result<Self, sea_orm::TryGetError> {
-        Ok(SchemaVersion(i16::try_get(res, pre, col)?))
+        Ok(SchemaVersion(i16::try_get_by(res, index)?))
}
}
diff --git a/server/src/domain/sql_user_backend_handler.rs b/server/src/domain/sql_user_backend_handler.rs
index 9220dff..a481565 100644
--- a/server/src/domain/sql_user_backend_handler.rs
+++ b/server/src/domain/sql_user_backend_handler.rs
@@ -8,11 +8,10 @@ use super::{
use async_trait::async_trait;
use sea_orm::{
entity::IntoActiveValue,
- sea_query::{Cond, Expr, IntoCondition, SimpleExpr},
+ sea_query::{Alias, Cond, Expr, IntoColumnRef, IntoCondition, SimpleExpr},
ActiveModelTrait, ActiveValue, ColumnTrait, EntityTrait, ModelTrait, QueryFilter, QueryOrder,
QuerySelect, QueryTrait, Set,
};
-use sea_query::{Alias, IntoColumnRef};
use std::collections::HashSet;
use tracing::{debug, instrument};
diff --git a/server/src/domain/types.rs b/server/src/domain/types.rs
index 494f8f9..99ee6f4 100644
--- a/server/src/domain/types.rs
+++ b/server/src/domain/types.rs
@@ -53,8 +53,11 @@ impl std::string::ToString for Uuid {
}
impl TryGetable for Uuid {
-    fn try_get(res: &QueryResult, pre: &str, col: &str) -> std::result::Result<Self, TryGetError> {
-        Ok(Uuid(String::try_get(res, pre, col)?))
+    fn try_get_by<I: sea_orm::ColIdx>(
+        res: &QueryResult,
+        index: I,
+    ) -> std::result::Result<Self, TryGetError> {
+        Ok(Uuid(String::try_get_by(res, index)?))
}
}
@@ -142,8 +145,8 @@ impl From<&UserId> for Value {
}
impl TryGetable for UserId {
-    fn try_get(res: &QueryResult, pre: &str, col: &str) -> Result<Self, TryGetError> {
-        Ok(UserId::new(&String::try_get(res, pre, col)?))
+    fn try_get_by<I: sea_orm::ColIdx>(res: &QueryResult, index: I) -> Result<Self, TryGetError> {
+        Ok(UserId::new(&String::try_get_by(res, index)?))
}
}
@@ -261,8 +264,8 @@ impl JpegPhoto {
}
impl TryGetable for JpegPhoto {
-    fn try_get(res: &QueryResult, pre: &str, col: &str) -> Result<Self, TryGetError> {
-        <Self as TryFrom<Vec<u8>>>::try_from(Vec::<u8>::try_get(res, pre, col)?)
+    fn try_get_by<I: sea_orm::ColIdx>(res: &QueryResult, index: I) -> Result<Self, TryGetError> {
+        <Self as TryFrom<Vec<u8>>>::try_from(Vec::<u8>::try_get_by(res, index)?)
.map_err(|e| {
TryGetError::DbErr(DbErr::TryIntoErr {
from: "[u8]",
@@ -345,8 +348,8 @@ impl From<GroupId> for Value {
}
impl TryGetable for GroupId {
-    fn try_get(res: &QueryResult, pre: &str, col: &str) -> Result<Self, TryGetError> {
-        Ok(GroupId(i32::try_get(res, pre, col)?))
+    fn try_get_by<I: sea_orm::ColIdx>(res: &QueryResult, index: I) -> Result<Self, TryGetError> {
+        Ok(GroupId(i32::try_get_by(res, index)?))
}
}
@@ -364,7 +367,7 @@ impl ValueType for GroupId {
}
fn column_type() -> ColumnType {
- ColumnType::Integer(None)
+ ColumnType::Integer
}
}
diff --git a/server/src/infra/jwt_sql_tables.rs b/server/src/infra/jwt_sql_tables.rs
index b3443c6..2998235 100644
--- a/server/src/infra/jwt_sql_tables.rs
+++ b/server/src/infra/jwt_sql_tables.rs
@@ -1,5 +1,7 @@
-use sea_orm::ConnectionTrait;
-use sea_query::{ColumnDef, ForeignKey, ForeignKeyAction, Iden, Table};
+use sea_orm::{
+ sea_query::{self, ColumnDef, ForeignKey, ForeignKeyAction, Iden, Table},
+ ConnectionTrait,
+};
pub use crate::domain::{sql_migrations::Users, sql_tables::DbConnection};
diff --git a/server/src/infra/sql_backend_handler.rs b/server/src/infra/sql_backend_handler.rs
index 253eca8..16e3f1a 100644
--- a/server/src/infra/sql_backend_handler.rs
+++ b/server/src/infra/sql_backend_handler.rs
@@ -7,10 +7,10 @@ use crate::domain::{
};
use async_trait::async_trait;
use sea_orm::{
- sea_query::Cond, ActiveModelTrait, ColumnTrait, EntityTrait, FromQueryResult, IntoActiveModel,
- QueryFilter, QuerySelect,
+ sea_query::{Cond, Expr},
+ ActiveModelTrait, ColumnTrait, EntityTrait, FromQueryResult, IntoActiveModel, QueryFilter,
+ QuerySelect,
};
-use sea_query::Expr;
use std::collections::HashSet;
use tracing::{debug, instrument};
From d04305433f79143b4f2dfbffb0831b2ff28550fd Mon Sep 17 00:00:00 2001
From: Valentin Tolmer
Date: Fri, 10 Feb 2023 12:43:49 +0100
Subject: [PATCH 24/62] server: use the new into_tuple from sea_orm
---
server/src/domain/sql_opaque_handler.rs | 10 +++-------
server/src/infra/sql_backend_handler.rs | 16 +++++-----------
2 files changed, 8 insertions(+), 18 deletions(-)
diff --git a/server/src/domain/sql_opaque_handler.rs b/server/src/domain/sql_opaque_handler.rs
index b2ded01..5a5667b 100644
--- a/server/src/domain/sql_opaque_handler.rs
+++ b/server/src/domain/sql_opaque_handler.rs
@@ -8,7 +8,7 @@ use super::{
};
use async_trait::async_trait;
use lldap_auth::opaque;
-use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait, FromQueryResult, QuerySelect};
+use sea_orm::{ActiveModelTrait, ActiveValue, EntityTrait, QuerySelect};
use secstr::SecUtf8;
use tracing::{debug, instrument};
@@ -50,18 +50,14 @@ impl SqlBackendHandler {
#[instrument(skip_all, level = "debug", err)]
async fn get_password_file_for_user(&self, user_id: UserId) -> Result