+1 -4  src/oauth_provider.rs
···
         "Invalid identifier or password",
     ),
     AuthResult::TwoFactorRequired(masked_email) => {
-        // Email sending step can be handled here if needed in the future.
-
-        // {"error":"second_authentication_factor_required","error_description":"emailOtp authentication factor required (hint: 2***0@p***m)","type":"emailOtp","hint":"2***0@p***m"}
         let body_str = match serde_json::to_string(&serde_json::json!({
             "error": "second_authentication_factor_required",
             "error_description": format!("emailOtp authentication factor required (hint: {})", masked_email),
···
     },
     Err(err) => {
         log::error!(
-            "Error during pre-auth check. This happens on the create_session endpoint when trying to decide if the user has access:\n {err}"
+            "Error during pre-auth check. This happens on the oauth signin endpoint when trying to decide if the user has access:\n {err}"
         );
         oauth_json_error_response(
             StatusCode::BAD_REQUEST,
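
The removed comment above documents the full response body this arm produces. As an illustration only, here is a minimal, self-contained sketch of that shape; the `type` and `hint` fields past the fold are assumed from the removed comment, and the masked address is a made-up example:

```rust
use serde_json::json;

fn main() {
    // Sketch only: field names mirror the removed comment; the handler's real
    // body past the folded lines may differ.
    let masked_email = "2***0@p***m"; // hypothetical masked address for illustration
    let body = json!({
        "error": "second_authentication_factor_required",
        "error_description": format!("emailOtp authentication factor required (hint: {})", masked_email),
        "type": "emailOtp",
        "hint": masked_email,
    });
    let body_str = serde_json::to_string(&body).expect("static JSON always serializes");
    assert!(body_str.contains("\"type\":\"emailOtp\""));
    println!("{body_str}");
}
```
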
+5 -1  Cargo.toml
···
 hex = "0.4"
 jwt-compact = { version = "0.8.0", features = ["es256k"] }
 scrypt = "0.11"
-lettre = { version = "0.11.18", features = ["tokio1", "pool", "tokio1-native-tls"] }
+#lettre = { version = "0.11.18", default-features = false, features = ["pool", "tokio1-rustls", "smtp-transport", "hostname", "builder"] }
+#lettre = { version = "0.11", default-features = false, features = ["builder", "webpki-roots", "rustls", "aws-lc-rs", "smtp-transport", "tokio1", "tokio1-rustls"] }
+aws-lc-rs = "1.13.0"
+lettre = { version = "0.11", default-features = false, features = ["builder", "webpki-roots", "rustls", "aws-lc-rs", "smtp-transport", "tokio1", "tokio1-rustls"] }
+rustls = { version = "0.23", default-features = false, features = ["tls12", "std", "logging", "aws_lc_rs"] }
 handlebars = { version = "6.3.2", features = ["rust-embed"] }
 rust-embed = "8.7.2"
 axum-template = { version = "3.0.0", features = ["handlebars"] }
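
The dependency change swaps lettre's native-tls stack for rustls backed by aws-lc-rs. Below is a minimal sketch of sending mail through the async SMTP transport those features enable; the relay host, credentials, and addresses are placeholders, and the gatekeeper's real email code may be wired differently:

```rust
use lettre::{
    transport::smtp::authentication::Credentials, AsyncSmtpTransport, AsyncTransport, Message,
    Tokio1Executor,
};

#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Placeholder message built for illustration only.
    let email = Message::builder()
        .from("PDS Gatekeeper <no-reply@example.com>".parse()?)
        .to("user@example.com".parse()?)
        .subject("Your one-time code")
        .body(String::from("123456"))?;

    // TLS comes from the tokio1-rustls feature (rustls + aws-lc-rs), matching the
    // Cargo.toml change above. Relay host and credentials are placeholders.
    let mailer = AsyncSmtpTransport::<Tokio1Executor>::relay("smtp.example.com")?
        .credentials(Credentials::new("smtp-user".into(), "smtp-pass".into()))
        .build();

    mailer.send(email).await?;
    Ok(())
}
```
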
+79 -15  README.md
···
 
 # Setup
 
-We are getting close! Testing now
-
-Nothing here yet! If you are brave enough to try before full release, let me know and I'll help you set it up.
-But I want to run it locally on my own PDS first to test run it a bit.
-
-Example Caddyfile (mostly so I don't lose it for now. Will have a better one in the future)
+PDS Gatekeeper has two parts to its setup: a docker compose service and a reverse proxy (Caddy in this case). I will
+be assuming you set up the PDS following the directions found [here](https://atproto.com/guides/self-hosting), but if
+yours is different, or you have questions, feel free to let me know and we can figure it out.
+
+## Docker compose
+
+The pds gatekeeper container can be found on Docker Hub under the name `fatfingers23/pds_gatekeeper`. The container
+does need access to the `/pds` root folder so it can use the same databases as your PDS. The part you need to add
+would look a bit like below. You can find a full example of what I use for my pds at
+[./examples/compose.yml](./examples/compose.yml). The compose file is usually found at `/pds/compose.yaml` on your PDS.
+
+```yml
+gatekeeper:
+  container_name: gatekeeper
+  image: fatfingers23/pds_gatekeeper:arm-latest
+  network_mode: host
+  restart: unless-stopped
+  # This gives the container access to the PDS folder. Source is the location of that directory on your server
+  volumes:
+    - type: bind
+      source: /pds
+      target: /pds
+  depends_on:
+    - pds
+```
+
+## Caddy setup
+
+For the reverse proxy I use Caddy. This part is what overrides the endpoints and proxies them to PDS gatekeeper to add
+in extra functionality. The main part is below; for a full example see [./examples/Caddyfile](./examples/Caddyfile).
+The Caddyfile is usually found at `/pds/caddy/etc/caddy/Caddyfile` on your PDS.
 
 ```caddyfile
-http://localhost {
-
 @gatekeeper {
-    path /xrpc/com.atproto.server.getSession
-    path /xrpc/com.atproto.server.updateEmail
-    path /xrpc/com.atproto.server.createSession
-    path /@atproto/oauth-provider/~api/sign-in
+    path /xrpc/com.atproto.server.getSession
+    path /xrpc/com.atproto.server.updateEmail
+    path /xrpc/com.atproto.server.createSession
+    path /@atproto/oauth-provider/~api/sign-in
 }
 
 handle @gatekeeper {
-    reverse_proxy http://localhost:8080
+    reverse_proxy http://localhost:8080
 }
 
-reverse_proxy /* http://localhost:3000
+reverse_proxy http://localhost:3000
+```
+
+If you use a cloudflare tunnel then your Caddyfile would look a bit more like below, with your tunnel proxying to
+`localhost:8082` (or whatever port you want).
+
+```caddyfile
+http://*.localhost:8082, http://localhost:8082 {
+    @gatekeeper {
+        path /xrpc/com.atproto.server.getSession
+        path /xrpc/com.atproto.server.updateEmail
+        path /xrpc/com.atproto.server.createSession
+        path /@atproto/oauth-provider/~api/sign-in
+    }
+
+    handle @gatekeeper {
+        reverse_proxy http://localhost:8080
+    }
+
+    reverse_proxy http://localhost:3000
 }
 
-```
+```
+
+# Environment variables and bonuses
+
+Every environment variable can be set in `pds.env` and shared between the PDS and gatekeeper, with the exception of
+`PDS_ENV_LOCATION`. That variable controls where `pds.env` is loaded from; by default it checks `/pds/pds.env`, so it
+is recommended to mount the `/pds` folder on the server to `/pds` in the pds gatekeeper container.
+
+`PDS_DATA_DIRECTORY` - Root directory of the PDS. Same as the one found in `pds.env`; this is how pds gatekeeper knows
+the rest of the environment variables.
+
+`GATEKEEPER_EMAIL_TEMPLATES_DIRECTORY` - The folder for the templates of the emails PDS gatekeeper sends. You can find
+them in [./email_templates](./email_templates). You are free to edit them as you please and set this variable to a
+location in the pds gatekeeper container, and it will use them in place of the default ones. Just make sure to keep
+the names the same.
+
+`PDS_BASE_URL` - Base URL of the PDS. You most likely want `https://localhost:3000`, which is also the default.
+
+`GATEKEEPER_HOST` - Host for pds gatekeeper. Defaults to `127.0.0.1`.
+
+`GATEKEEPER_PORT` - Port for pds gatekeeper. Defaults to `8080`.
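
As a quick illustration of how the documented defaults could be resolved at startup, here is a minimal sketch, not the project's actual config loader; the variable names and default values come straight from the README text above:

```rust
use std::env;

fn main() {
    // Sketch only: names and defaults mirror the README; the real loader may differ.
    let pds_env_location =
        env::var("PDS_ENV_LOCATION").unwrap_or_else(|_| "/pds/pds.env".to_string());
    let pds_base_url =
        env::var("PDS_BASE_URL").unwrap_or_else(|_| "https://localhost:3000".to_string());
    let host = env::var("GATEKEEPER_HOST").unwrap_or_else(|_| "127.0.0.1".to_string());
    let port: u16 = env::var("GATEKEEPER_PORT")
        .ok()
        .and_then(|p| p.parse().ok())
        .unwrap_or(8080);

    println!("loading PDS env from {pds_env_location}");
    println!("PDS base url: {pds_base_url}");
    println!("gatekeeper listening on {host}:{port}");
}
```
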
+29  examples/Caddyfile
···
+{
+    email youremail@myemail.com
+    on_demand_tls {
+        ask http://localhost:3000/tls-check
+    }
+}
+
+*.yourpds.com, yourpds.com {
+    tls {
+        on_demand
+    }
+    # You'll most likely just want from here to....
+    @gatekeeper {
+        path /xrpc/com.atproto.server.getSession
+        path /xrpc/com.atproto.server.updateEmail
+        path /xrpc/com.atproto.server.createSession
+        path /@atproto/oauth-provider/~api/sign-in
+    }
+
+    handle @gatekeeper {
+        # This is the address for PDS gatekeeper, default is 8080
+        reverse_proxy http://localhost:8080
+    }
+
+    reverse_proxy http://localhost:3000
+    # ..here. Copy and paste this replacing the reverse_proxy http://localhost:3000 line
+}
+
+
+51  examples/compose.yml
···
+version: '3.9'
+services:
+  caddy:
+    container_name: caddy
+    image: caddy:2
+    network_mode: host
+    depends_on:
+      - pds
+    restart: unless-stopped
+    volumes:
+      - type: bind
+        source: /pds/caddy/data
+        target: /data
+      - type: bind
+        source: /pds/caddy/etc/caddy
+        target: /etc/caddy
+  pds:
+    container_name: pds
+    image: ghcr.io/bluesky-social/pds:0.4
+    network_mode: host
+    restart: unless-stopped
+    volumes:
+      - type: bind
+        source: /pds
+        target: /pds
+    env_file:
+      - /pds/pds.env
+  watchtower:
+    container_name: watchtower
+    image: containrrr/watchtower:latest
+    network_mode: host
+    volumes:
+      - type: bind
+        source: /var/run/docker.sock
+        target: /var/run/docker.sock
+    restart: unless-stopped
+    environment:
+      WATCHTOWER_CLEANUP: true
+      WATCHTOWER_SCHEDULE: "@midnight"
+  gatekeeper:
+    container_name: gatekeeper
+    image: fatfingers23/pds_gatekeeper:arm-latest
+    network_mode: host
+    restart: unless-stopped
+    # This gives the container access to the PDS folder. Source is the location of that directory on your server
+    volumes:
+      - type: bind
+        source: /pds
+        target: /pds
+    depends_on:
+      - pds