Merge branch 'main' into inbox-tweaks-canary

This commit is contained in:
Cocoa 2023-03-28 19:13:00 +00:00
commit 61fbca9934
548 changed files with 23202 additions and 1078 deletions

61
.github/workflows/base-build.yml vendored Normal file
View File

@ -0,0 +1,61 @@
# Nightly (and on-demand) build of the "base" Docker image, pushed to GHCR.
name: (package) base image nightly build

on:
  workflow_dispatch:
  schedule:
    # 01:25 UTC daily — odd minute chosen to avoid the top-of-hour GHA rush.
    - cron: "25 1 * * *"

env:
  IMAGE_NAME: base

jobs:
  build:
    # Only run in the canonical repo, not in forks.
    if: github.repository == 'dreamwidth/dreamwidth'
    runs-on: ubuntu-latest
    permissions:
      # Declaring a permissions block zeroes every unlisted scope, so the
      # checkout step's read access must be granted explicitly.
      contents: read
      packages: write
    steps:
      - name: Checkout Code
        uses: actions/checkout@v3
      - name: Build image
        run: docker build -t $IMAGE_NAME --label "runnumber=${GITHUB_RUN_ID}" etc/docker/$IMAGE_NAME
      - name: Log in to registry
        # GHCR login per GitHub's documented pattern: the actor triggering the
        # run is the username, GITHUB_TOKEN is the password. A bare "$" here
        # (as previously written) is not a valid username and the login fails.
        run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
      - name: Push image
        run: |
          IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME
          # Change all uppercase to lowercase (GHCR requires lowercase repo paths)
          IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
          # Strip git ref prefix from version
          VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
          # Strip "v" prefix from tag name
          [[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//')
          # Use Docker `latest` tag convention for master/main
          [ "$VERSION" == "master" ] && VERSION=latest
          [ "$VERSION" == "main" ] && VERSION=latest
          echo IMAGE_ID=$IMAGE_ID
          echo VERSION=$VERSION
          docker tag $IMAGE_NAME $IMAGE_ID:$VERSION
          docker push $IMAGE_ID:$VERSION
          # Get sha256 for later; RepoDigests is populated once the push completes.
          IMAGE_DIGEST=$(docker inspect --format='{{index .RepoDigests 0}}' $IMAGE_NAME | cut -d@ -f2)
          echo "IMAGE_DIGEST=$IMAGE_DIGEST" >> $GITHUB_ENV
      - name: Notify Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          description: "Package digest: `${{ env.IMAGE_DIGEST }}`"
          webhook: ${{ secrets.DISCORD_WEBHOOK }}

61
.github/workflows/proxy-build.yml vendored Normal file
View File

@ -0,0 +1,61 @@
# Manual-only build of the "proxy" Docker image, pushed to GHCR.
name: (package) proxy manual build

# No on-push because proxy builds are incredibly rare, so might
# as well not waste the GHA minutes
on:
  workflow_dispatch:

env:
  IMAGE_NAME: proxy

jobs:
  build:
    # Only run in the canonical repo, not in forks.
    if: github.repository == 'dreamwidth/dreamwidth'
    runs-on: ubuntu-latest
    permissions:
      # Declaring a permissions block zeroes every unlisted scope, so the
      # checkout step's read access must be granted explicitly.
      contents: read
      packages: write
    steps:
      - name: Checkout Code
        uses: actions/checkout@v3
      - name: Build image
        run: docker build -t $IMAGE_NAME --label "runnumber=${GITHUB_RUN_ID}" etc/docker/$IMAGE_NAME
      - name: Log in to registry
        # GHCR login per GitHub's documented pattern: the actor triggering the
        # run is the username, GITHUB_TOKEN is the password. A bare "$" here
        # (as previously written) is not a valid username and the login fails.
        run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u ${{ github.actor }} --password-stdin
      - name: Push image
        run: |
          IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME
          # Change all uppercase to lowercase (GHCR requires lowercase repo paths)
          IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')
          # Strip git ref prefix from version
          VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')
          # Strip "v" prefix from tag name
          [[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//')
          # Use Docker `latest` tag convention for master/main
          [ "$VERSION" == "master" ] && VERSION=latest
          [ "$VERSION" == "main" ] && VERSION=latest
          echo IMAGE_ID=$IMAGE_ID
          echo VERSION=$VERSION
          docker tag $IMAGE_NAME $IMAGE_ID:$VERSION
          docker push $IMAGE_ID:$VERSION
          # Get sha256 for later; RepoDigests is populated once the push completes.
          IMAGE_DIGEST=$(docker inspect --format='{{index .RepoDigests 0}}' $IMAGE_NAME | cut -d@ -f2)
          echo "IMAGE_DIGEST=$IMAGE_DIGEST" >> $GITHUB_ENV
      - name: Notify Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          description: "Package digest: `${{ env.IMAGE_DIGEST }}`"
          webhook: ${{ secrets.DISCORD_WEBHOOK }}

View File

@ -0,0 +1,101 @@
{
"containerDefinitions": [
{
"name": "web",
"image": "ghcr.io/dreamwidth/web:latest",
"cpu": 0,
"portMappings": [
{
"containerPort": 6081,
"hostPort": 6081,
"protocol": "tcp"
}
],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
},
{
"sourceVolume": "log-share",
"containerPath": "/var/log/apache2",
"readOnly": false
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/web",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "web"
}
}
},
{
"name": "cloudwatch-agent",
"image": "public.ecr.aws/cloudwatch-agent/cloudwatch-agent:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"environment": [],
"mountPoints": [
{
"sourceVolume": "log-share",
"containerPath": "/var/log/apache2",
"readOnly": true
}
],
"volumesFrom": [],
"secrets": [
{
"name": "CW_CONFIG_CONTENT",
"valueFrom": "ecs-cwagent"
}
],
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/ecs/ecs-cwagent",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "ecs"
}
}
}
],
"family": "web-canary",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "log-share",
"host": {}
},
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-canary",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "1024",
"memory": "6144"
}

View File

@ -0,0 +1,23 @@
{
"logs": {
"logs_collected": {
"files": {
"collect_list": [
{
"file_path": "/var/log/apache2/dreamwidth_access-DISABLED.log",
"log_group_name": "/dreamwidth/web-requests",
"log_stream_name": "{hostname}",
"timezone": "UTC",
"timestamp_format": "%Y-%m-%dT%H:%M:%S.%fZ",
"filters": [
{
"type": "exclude",
"expression": "ELB-HealthChecker"
}
]
}
]
}
}
}
}

View File

@ -0,0 +1,101 @@
{
"containerDefinitions": [
{
"name": "web",
"image": "ghcr.io/dreamwidth/web:latest",
"cpu": 0,
"portMappings": [
{
"containerPort": 6081,
"hostPort": 6081,
"protocol": "tcp"
}
],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
},
{
"sourceVolume": "log-share",
"containerPath": "/var/log/apache2",
"readOnly": false
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/web",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "web"
}
}
},
{
"name": "cloudwatch-agent",
"image": "public.ecr.aws/cloudwatch-agent/cloudwatch-agent:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"environment": [],
"mountPoints": [
{
"sourceVolume": "log-share",
"containerPath": "/var/log/apache2",
"readOnly": true
}
],
"volumesFrom": [],
"secrets": [
{
"name": "CW_CONFIG_CONTENT",
"valueFrom": "ecs-cwagent"
}
],
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/ecs/ecs-cwagent",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "ecs"
}
}
}
],
"family": "web-stable",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "log-share",
"host": {}
},
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-stable",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "1024",
"memory": "6144"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/birthday-notify",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-birthday-notify",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/change-poster-id",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-change-poster-id",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/codebuild-notifier",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-codebuild-notifier",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/content-importer-lite",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-content-importer-lite",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/content-importer",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-content-importer",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "2048"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/content-importer-verify",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-content-importer-verify",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/directory-meta",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-directory-meta",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/distribute-invites",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-distribute-invites",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/dw-esn-cluster-subs",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-dw-esn-cluster-subs",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/dw-esn-filter-subs",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-dw-esn-filter-subs",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/dw-esn-fired-event",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-dw-esn-fired-event",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/dw-esn-process-sub",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-dw-esn-process-sub",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/dw-send-email",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-dw-send-email",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/dw-sphinx-copier",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-dw-sphinx-copier",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/embeds",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-embeds",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/esn-cluster-subs",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-esn-cluster-subs",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/esn-filter-subs",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-esn-filter-subs",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/esn-fired-event",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-esn-fired-event",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/esn-process-sub",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-esn-process-sub",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/resolve-extacct",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-resolve-extacct",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/send-email-ses",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-send-email-ses",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/[% name %]",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-[% name %]",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "[% cpu %]",
"memory": "[% memory %]"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/spellcheck-gm",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-spellcheck-gm",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/sphinx-copier",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-sphinx-copier",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/sphinx-search-gm",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-sphinx-search-gm",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

View File

@ -0,0 +1,57 @@
{
"containerDefinitions": [
{
"name": "worker",
"image": "ghcr.io/dreamwidth/worker:latest",
"cpu": 0,
"portMappings": [],
"essential": true,
"command": [
"bash",
"/opt/startup-prod.sh",
"bin/worker/synsuck",
"-v"
],
"environment": [],
"mountPoints": [
{
"sourceVolume": "dw-config",
"containerPath": "/dw/etc",
"readOnly": true
}
],
"volumesFrom": [],
"linuxParameters": {
"initProcessEnabled": true
},
"logConfiguration": {
"logDriver": "awslogs",
"options": {
"awslogs-create-group": "true",
"awslogs-group": "/dreamwidth/worker",
"awslogs-region": "us-east-1",
"awslogs-stream-prefix": "worker"
}
}
}
],
"family": "worker-synsuck",
"taskRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskRole",
"executionRoleArn": "arn:aws:iam::194396987458:role/dreamwidth-ecsTaskExecutionRole",
"networkMode": "awsvpc",
"volumes": [
{
"name": "dw-config",
"efsVolumeConfiguration": {
"fileSystemId": "fs-f9f3e04d",
"rootDirectory": "/etc-workers",
"transitEncryption": "DISABLED"
}
}
],
"requiresCompatibilities": [
"FARGATE"
],
"cpu": "256",
"memory": "512"
}

90
.github/workflows/update-workflows.pl vendored Normal file
View File

@ -0,0 +1,90 @@
#!/usr/bin/perl
#
# update-workflows.pl
#
# Update the worker workflow files. This file also contains information about
# the workers that run in ECS... that should really be somewhere else, but we
# have it here for now.
#
# Authors:
#   Mark Smith <mark@dreamwidth.org>
#
# Copyright (c) 2022 by Dreamwidth Studios, LLC.
#
# This program is free software; you may redistribute it and/or modify it under
# the same terms as Perl itself. For a copy of the license, please reference
# 'perldoc perlartistic' or 'perldoc perlgpl'.
#

use strict;
use warnings;    # was missing; v5.10 alone does not enable warnings
use v5.10;

use lib "$ENV{LJHOME}/extlib/lib/perl5";

use Template;

# Worker fleet definitions. Each value is an arrayref whose columns are:
#   [ MinCt, MaxCt, Memory (MiB), MilliCpu, TgtCpu ]
# Only Memory ([2]) and MilliCpu ([3]) are consumed below; the other columns
# presumably drive scaling configuration elsewhere -- TODO confirm.
my %workers = (

    # New SQS based workers
    'dw-esn-cluster-subs' => [ 1, 50, 512, 256, 50, ],
    'dw-esn-filter-subs'  => [ 1, 50, 512, 256, 50, ],
    'dw-esn-fired-event'  => [ 1, 50, 512, 256, 50, ],
    'dw-esn-process-sub'  => [ 1, 50, 512, 256, 50, ],
    'dw-sphinx-copier'    => [ 1, 50, 512, 256, 50, ],

    # Old style ESN workers, mostly deprecated, we keep one each around just
    # in case something ends up in the queue
    'esn-cluster-subs' => [ 1, 10, 512, 256, 50, ],
    'esn-filter-subs'  => [ 1, 10, 512, 256, 50, ],
    'esn-fired-event'  => [ 1, 10, 512, 256, 50, ],
    'esn-process-sub'  => [ 1, 10, 512, 256, 50, ],

    # Importer workers
    'content-importer-verify' => [ 1, 1, 512,  256, 50 ],
    'content-importer-lite'   => [ 4, 4, 512,  256, 50 ],
    'content-importer'        => [ 2, 2, 2048, 256, 50 ],

    # Other workers
    'birthday-notify'    => [ 1, 1,  512, 256, 50, ],
    'change-poster-id'   => [ 1, 1,  512, 256, 50, ],
    'directory-meta'     => [ 1, 1,  512, 256, 50, ],
    'distribute-invites' => [ 1, 1,  512, 256, 50, ],
    'dw-send-email'      => [ 1, 50, 512, 256, 50, ],
    'embeds'             => [ 1, 15, 512, 256, 50, ],
    'resolve-extacct'    => [ 1, 1,  512, 256, 50, ],
    'send-email-ses'     => [ 1, 1,  512, 256, 50, ],
    'spellcheck-gm'      => [ 1, 1,  512, 256, 50, ],
    'sphinx-copier'      => [ 1, 1,  512, 256, 50, ],
    'sphinx-search-gm'   => [ 1, 1,  512, 256, 50, ],
    'synsuck'            => [ 1, 20, 512, 256, 50, ],

    # Misc site utilities
    'codebuild-notifier' => [ 1, 1, 512, 256, 50, ],

    #'metrics-emitter'    => [ 1, 1, 512, 256, 50, ],

    # DO NOT run these in k8s... until we have some way of having a dedicated IP,
    # we keep getting banned by LJ.
    # 'xpost' => [ 1, 1, '300M', '50m' ],
    # importers...
);

# Generate the deployment workflow from its template. Report the constructor
# failure reason (Template->error) instead of dying bare.
my $tt = Template->new()
    or die 'Template initialization failed: ' . Template->error() . "\n";

$tt->process( 'worker-deploy.tt', { workers => \%workers }, 'worker-deploy.yml' )
    or die $tt->error;

# Generate one ECS task-definition JSON per worker. Iterate in sorted order so
# regeneration is deterministic and diffs of the output files stay stable.
foreach my $worker ( sort keys %workers ) {
    $tt->process(
        'tasks/worker-service.tt',
        {
            name   => $worker,
            cpu    => $workers{$worker}->[3],    # MilliCpu column
            memory => $workers{$worker}->[2],    # Memory column
        },
        "tasks/worker-$worker-service.json"
    ) or die $tt->error;
}

68
.github/workflows/web-build.yml vendored Normal file
View File

@ -0,0 +1,68 @@
# Builds the "web" container image and pushes it to GHCR whenever web-facing
# code lands on main, or on demand via workflow_dispatch.
name: (package) web automatic build

on:
  push:
    branches:
      - main
    paths:
      - cgi-bin/**
      - htdocs/**
      - views/**
      - schemes/**
      - src/s2/**
  workflow_dispatch:

env:
  IMAGE_NAME: web

jobs:
  build:
    # Never run on forks.
    if: github.repository == 'dreamwidth/dreamwidth'
    runs-on: ubuntu-latest
    permissions:
      packages: write

    steps:
      - name: Checkout Code
        uses: actions/checkout@v3

      - name: Build image
        run: docker build -t $IMAGE_NAME --label "runnumber=${GITHUB_RUN_ID}" etc/docker/$IMAGE_NAME

      - name: Log in to registry
        # NOTE(review): the username here is literally "$"; GHCR authenticates
        # on the token alone, but confirm this isn't a mangled ${{ github.actor }}.
        run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $ --password-stdin

      - name: Push image
        run: |
          IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME

          # Change all uppercase to lowercase
          IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')

          # Strip git ref prefix from version
          VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')

          # Strip "v" prefix from tag name
          [[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//')

          # Use Docker `latest` tag convention for master/main
          [ "$VERSION" == "master" ] && VERSION=latest
          [ "$VERSION" == "main" ] && VERSION=latest

          echo IMAGE_ID=$IMAGE_ID
          echo VERSION=$VERSION

          docker tag $IMAGE_NAME $IMAGE_ID:$VERSION
          docker push $IMAGE_ID:$VERSION

          # Get sha256 for later
          IMAGE_DIGEST=$(docker inspect --format='{{index .RepoDigests 0}}' $IMAGE_NAME | cut -d@ -f2)
          echo "IMAGE_DIGEST=$IMAGE_DIGEST" >> $GITHUB_ENV

      - name: Notify Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          description: "Package digest: `${{ env.IMAGE_DIGEST }}`\n\nDeploy here: https://github.com/dreamwidth/dreamwidth/actions/workflows/web-deploy.yml"
          webhook: ${{ secrets.DISCORD_WEBHOOK }}

63
.github/workflows/web-deploy.yml vendored Normal file
View File

@ -0,0 +1,63 @@
# Manually-triggered deploy of a web image (by digest) to one of the ECS
# web services (canary or stable).
name: (deploy) web servers

on:
  workflow_dispatch:
    inputs:
      service:
        type: choice
        description: Which service to deploy
        options:
          - web-canary
          - web-stable
      tag:
        type: string
        description: SHA256 to deploy (include "sha256:" prefix)
        required: true

env:
  REGION: us-east-1
  ECS_CLUSTER: dreamwidth
  CONTAINER_NAME: web
  IMAGE_BASE: ghcr.io/dreamwidth/web

jobs:
  deploy:
    # Never run on forks.
    if: github.repository == 'dreamwidth/dreamwidth'
    runs-on: ubuntu-latest

    steps:
      - name: Checkout Code
        uses: actions/checkout@v3

      - name: Configure AWS Credentials
        uses: aws-actions/configure-aws-credentials@v1
        with:
          aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
          aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
          aws-region: ${{ env.REGION }}

      # Substitute the requested image digest into the checked-in task JSON.
      - name: Render Amazon ECS task definition
        id: render-web-container
        uses: aws-actions/amazon-ecs-render-task-definition@v1
        with:
          task-definition: ".github/workflows/tasks/${{ github.event.inputs.service }}-service.json"
          container-name: ${{ env.CONTAINER_NAME }}
          image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"

      - name: Deploy to Amazon ECS service
        uses: aws-actions/amazon-ecs-deploy-task-definition@v1
        with:
          task-definition: ${{ steps.render-web-container.outputs.task-definition }}
          cluster: ${{ env.ECS_CLUSTER }}
          service: "${{ github.event.inputs.service }}-service"

      - name: Notify Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          title: "${{ github.event.inputs.service }} DEPLOY STARTED"
          description: "Deploying `${{ github.event.inputs.tag }}` to `${{ github.event.inputs.service }}`\n\nClick the header above to watch the deployment progress."
          url: "https://${{ env.REGION }}.console.aws.amazon.com/ecs/v2/clusters/dreamwidth/services/${{ github.event.inputs.service }}-service/deployments?region=${{ env.REGION }}"
          webhook: ${{ secrets.DISCORD_WEBHOOK }}
          nocontext: true

65
.github/workflows/worker-build.yml vendored Normal file
View File

@ -0,0 +1,65 @@
# Builds the "worker" container image and pushes it to GHCR whenever worker
# code lands on main, or on demand via workflow_dispatch.
name: (package) worker automatic build

on:
  push:
    branches:
      - main
    paths:
      - bin/**
      - cgi-bin/**
  workflow_dispatch:

env:
  IMAGE_NAME: worker

jobs:
  build:
    # Never run on forks.
    if: github.repository == 'dreamwidth/dreamwidth'
    runs-on: ubuntu-latest
    permissions:
      packages: write

    steps:
      - name: Checkout Code
        uses: actions/checkout@v3

      - name: Build image
        run: docker build -t $IMAGE_NAME --label "runnumber=${GITHUB_RUN_ID}" etc/docker/$IMAGE_NAME

      - name: Log in to registry
        # NOTE(review): the username here is literally "$"; GHCR authenticates
        # on the token alone, but confirm this isn't a mangled ${{ github.actor }}.
        run: echo "${{ secrets.GITHUB_TOKEN }}" | docker login ghcr.io -u $ --password-stdin

      - name: Push image
        run: |
          IMAGE_ID=ghcr.io/${{ github.repository_owner }}/$IMAGE_NAME

          # Change all uppercase to lowercase
          IMAGE_ID=$(echo $IMAGE_ID | tr '[A-Z]' '[a-z]')

          # Strip git ref prefix from version
          VERSION=$(echo "${{ github.ref }}" | sed -e 's,.*/\(.*\),\1,')

          # Strip "v" prefix from tag name
          [[ "${{ github.ref }}" == "refs/tags/"* ]] && VERSION=$(echo $VERSION | sed -e 's/^v//')

          # Use Docker `latest` tag convention for master/main
          [ "$VERSION" == "master" ] && VERSION=latest
          [ "$VERSION" == "main" ] && VERSION=latest

          echo IMAGE_ID=$IMAGE_ID
          echo VERSION=$VERSION

          docker tag $IMAGE_NAME $IMAGE_ID:$VERSION
          docker push $IMAGE_ID:$VERSION

          # Get sha256 for later
          IMAGE_DIGEST=$(docker inspect --format='{{index .RepoDigests 0}}' $IMAGE_NAME | cut -d@ -f2)
          echo "IMAGE_DIGEST=$IMAGE_DIGEST" >> $GITHUB_ENV

      - name: Notify Discord
        uses: sarisia/actions-status-discord@v1
        if: always()
        with:
          description: "Package digest: `${{ env.IMAGE_DIGEST }}`\n\nDeploy here: https://github.com/dreamwidth/dreamwidth/actions/workflows/worker-deploy.yml"
          webhook: ${{ secrets.DISCORD_WEBHOOK }}

75
.github/workflows/worker-deploy.tt vendored Normal file
View File

@ -0,0 +1,75 @@
#
# AUTO-GENERATED. DO NOT EDIT.
#
[%# This .tt file is the Template Toolkit SOURCE; the header above refers to %]
[%# the worker-deploy.yml it renders to. It is processed by                 %]
[%# .github/workflows/update-workflows.pl, which supplies the `workers`     %]
[%# hash. Edit here and re-run that script to regenerate the workflow.      %]
name: (deploy) workers
on:
workflow_dispatch:
inputs:
service:
type: choice
description: Which service to deploy
options:
- ALL WORKERS (*)
[%# One selectable choice per worker, plus the deploy-everything option. %]
[%- FOREACH worker_name IN workers.keys.sort %]
- [% worker_name %]
[%- END %]
tag:
type: string
description: SHA256 to deploy (include "sha256:" prefix)
required: true
env:
REGION: us-east-1
ECS_CLUSTER: dreamwidth
CONTAINER_NAME: worker
IMAGE_BASE: ghcr.io/dreamwidth/worker
jobs:
deploy:
if: github.repository == 'dreamwidth/dreamwidth'
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v3
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.REGION }}
[%# Per worker: render its task JSON with the requested image digest. Each  %]
[%# step is gated so only the chosen worker (or ALL) actually runs.         %]
[%- FOREACH worker_name IN workers.keys.sort %]
- name: ([% worker_name %]) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == '[% worker_name %]'
id: render-worker-container-[% worker_name %]
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-[% worker_name %]-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
[%- END %]
[%# Per worker: deploy the rendered task definition to its ECS service.    %]
[%- FOREACH worker_name IN workers.keys.sort %]
- name: ([% worker_name %]) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == '[% worker_name %]'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-[% worker_name %].outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-[% worker_name %]-service"
[%- END %]
- name: Notify Discord
uses: sarisia/actions-status-discord@v1
if: always()
with:
title: "${{ github.event.inputs.service }} DEPLOY STARTED"
description: "Deploying `${{ github.event.inputs.tag }}` to `${{ github.event.inputs.service }}`\n\nClick the header above to watch the deployment progress."
url: "https://${{ env.REGION }}.console.aws.amazon.com/ecs/v2/clusters/dreamwidth/services?region=${{ env.REGION }}"
webhook: ${{ secrets.DISCORD_WEBHOOK }}
nocontext: true

501
.github/workflows/worker-deploy.yml vendored Normal file
View File

@ -0,0 +1,501 @@
#
# AUTO-GENERATED. DO NOT EDIT.
#
name: (deploy) workers
on:
workflow_dispatch:
inputs:
service:
type: choice
description: Which service to deploy
options:
- ALL WORKERS (*)
- birthday-notify
- change-poster-id
- codebuild-notifier
- content-importer
- content-importer-lite
- content-importer-verify
- directory-meta
- distribute-invites
- dw-esn-cluster-subs
- dw-esn-filter-subs
- dw-esn-fired-event
- dw-esn-process-sub
- dw-send-email
- dw-sphinx-copier
- embeds
- esn-cluster-subs
- esn-filter-subs
- esn-fired-event
- esn-process-sub
- resolve-extacct
- send-email-ses
- spellcheck-gm
- sphinx-copier
- sphinx-search-gm
- synsuck
tag:
type: string
description: SHA256 to deploy (include "sha256:" prefix)
required: true
env:
REGION: us-east-1
ECS_CLUSTER: dreamwidth
CONTAINER_NAME: worker
IMAGE_BASE: ghcr.io/dreamwidth/worker
jobs:
deploy:
if: github.repository == 'dreamwidth/dreamwidth'
runs-on: ubuntu-latest
steps:
- name: Checkout Code
uses: actions/checkout@v3
- name: Configure AWS Credentials
uses: aws-actions/configure-aws-credentials@v1
with:
aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }}
aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }}
aws-region: ${{ env.REGION }}
- name: (birthday-notify) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'birthday-notify'
id: render-worker-container-birthday-notify
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-birthday-notify-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (change-poster-id) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'change-poster-id'
id: render-worker-container-change-poster-id
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-change-poster-id-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (codebuild-notifier) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'codebuild-notifier'
id: render-worker-container-codebuild-notifier
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-codebuild-notifier-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (content-importer) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'content-importer'
id: render-worker-container-content-importer
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-content-importer-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (content-importer-lite) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'content-importer-lite'
id: render-worker-container-content-importer-lite
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-content-importer-lite-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (content-importer-verify) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'content-importer-verify'
id: render-worker-container-content-importer-verify
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-content-importer-verify-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (directory-meta) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'directory-meta'
id: render-worker-container-directory-meta
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-directory-meta-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (distribute-invites) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'distribute-invites'
id: render-worker-container-distribute-invites
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-distribute-invites-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (dw-esn-cluster-subs) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-esn-cluster-subs'
id: render-worker-container-dw-esn-cluster-subs
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-dw-esn-cluster-subs-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (dw-esn-filter-subs) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-esn-filter-subs'
id: render-worker-container-dw-esn-filter-subs
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-dw-esn-filter-subs-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (dw-esn-fired-event) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-esn-fired-event'
id: render-worker-container-dw-esn-fired-event
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-dw-esn-fired-event-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (dw-esn-process-sub) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-esn-process-sub'
id: render-worker-container-dw-esn-process-sub
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-dw-esn-process-sub-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (dw-send-email) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-send-email'
id: render-worker-container-dw-send-email
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-dw-send-email-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (dw-sphinx-copier) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-sphinx-copier'
id: render-worker-container-dw-sphinx-copier
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-dw-sphinx-copier-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (embeds) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'embeds'
id: render-worker-container-embeds
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-embeds-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (esn-cluster-subs) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'esn-cluster-subs'
id: render-worker-container-esn-cluster-subs
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-esn-cluster-subs-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (esn-filter-subs) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'esn-filter-subs'
id: render-worker-container-esn-filter-subs
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-esn-filter-subs-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (esn-fired-event) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'esn-fired-event'
id: render-worker-container-esn-fired-event
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-esn-fired-event-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (esn-process-sub) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'esn-process-sub'
id: render-worker-container-esn-process-sub
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-esn-process-sub-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (resolve-extacct) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'resolve-extacct'
id: render-worker-container-resolve-extacct
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-resolve-extacct-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (send-email-ses) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'send-email-ses'
id: render-worker-container-send-email-ses
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-send-email-ses-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (spellcheck-gm) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'spellcheck-gm'
id: render-worker-container-spellcheck-gm
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-spellcheck-gm-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (sphinx-copier) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'sphinx-copier'
id: render-worker-container-sphinx-copier
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-sphinx-copier-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (sphinx-search-gm) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'sphinx-search-gm'
id: render-worker-container-sphinx-search-gm
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-sphinx-search-gm-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (synsuck) Render Amazon ECS task definition
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'synsuck'
id: render-worker-container-synsuck
uses: aws-actions/amazon-ecs-render-task-definition@v1
with:
task-definition: ".github/workflows/tasks/worker-synsuck-service.json"
container-name: ${{ env.CONTAINER_NAME }}
image: "${{ env.IMAGE_BASE }}@${{ github.event.inputs.tag }}"
- name: (birthday-notify) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'birthday-notify'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-birthday-notify.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-birthday-notify-service"
- name: (change-poster-id) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'change-poster-id'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-change-poster-id.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-change-poster-id-service"
- name: (codebuild-notifier) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'codebuild-notifier'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-codebuild-notifier.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-codebuild-notifier-service"
- name: (content-importer) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'content-importer'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-content-importer.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-content-importer-service"
- name: (content-importer-lite) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'content-importer-lite'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-content-importer-lite.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-content-importer-lite-service"
- name: (content-importer-verify) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'content-importer-verify'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-content-importer-verify.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-content-importer-verify-service"
- name: (directory-meta) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'directory-meta'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-directory-meta.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-directory-meta-service"
- name: (distribute-invites) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'distribute-invites'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-distribute-invites.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-distribute-invites-service"
- name: (dw-esn-cluster-subs) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-esn-cluster-subs'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-dw-esn-cluster-subs.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-dw-esn-cluster-subs-service"
- name: (dw-esn-filter-subs) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-esn-filter-subs'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-dw-esn-filter-subs.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-dw-esn-filter-subs-service"
- name: (dw-esn-fired-event) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-esn-fired-event'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-dw-esn-fired-event.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-dw-esn-fired-event-service"
- name: (dw-esn-process-sub) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-esn-process-sub'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-dw-esn-process-sub.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-dw-esn-process-sub-service"
- name: (dw-send-email) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-send-email'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-dw-send-email.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-dw-send-email-service"
- name: (dw-sphinx-copier) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'dw-sphinx-copier'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-dw-sphinx-copier.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-dw-sphinx-copier-service"
- name: (embeds) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'embeds'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-embeds.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-embeds-service"
- name: (esn-cluster-subs) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'esn-cluster-subs'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-esn-cluster-subs.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-esn-cluster-subs-service"
- name: (esn-filter-subs) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'esn-filter-subs'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-esn-filter-subs.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-esn-filter-subs-service"
- name: (esn-fired-event) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'esn-fired-event'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-esn-fired-event.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-esn-fired-event-service"
- name: (esn-process-sub) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'esn-process-sub'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-esn-process-sub.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-esn-process-sub-service"
- name: (resolve-extacct) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'resolve-extacct'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-resolve-extacct.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-resolve-extacct-service"
- name: (send-email-ses) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'send-email-ses'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-send-email-ses.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-send-email-ses-service"
- name: (spellcheck-gm) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'spellcheck-gm'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-spellcheck-gm.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-spellcheck-gm-service"
- name: (sphinx-copier) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'sphinx-copier'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-sphinx-copier.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-sphinx-copier-service"
- name: (sphinx-search-gm) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'sphinx-search-gm'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-sphinx-search-gm.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-sphinx-search-gm-service"
- name: (synsuck) Deploy to Amazon ECS service
if: github.event.inputs.service == 'ALL WORKERS (*)' || github.event.inputs.service == 'synsuck'
uses: aws-actions/amazon-ecs-deploy-task-definition@v1
with:
task-definition: ${{ steps.render-worker-container-synsuck.outputs.task-definition }}
cluster: ${{ env.ECS_CLUSTER }}
service: "worker-synsuck-service"
- name: Notify Discord
uses: sarisia/actions-status-discord@v1
if: always()
with:
title: "${{ github.event.inputs.service }} DEPLOY STARTED"
description: "Deploying `${{ github.event.inputs.tag }}` to `${{ github.event.inputs.service }}`\n\nClick the header above to watch the deployment progress."
url: "https://${{ env.REGION }}.console.aws.amazon.com/ecs/v2/clusters/dreamwidth/services?region=${{ env.REGION }}"
webhook: ${{ secrets.DISCORD_WEBHOOK }}
nocontext: true

9
.gitignore vendored
View File

@ -1,7 +1,10 @@
/logs
/temp
/extlib
/ext
/ext/dw-private
/ext/local
/ext/ruby
/ext/yuicompressor
/etc/config*.pl
/build
/locks
@ -12,10 +15,14 @@ src/proxy/proxy
.vstags
.perl-cpm
# Ignore node_modules, wherever they occur
/**/node_modules/
# Ignore SCSS cache
.sass-cache
# Ignore compiled CSS
htdocs/stc/css
ext/dw-nonfree/htdocs/stc/css
# Ignore test stuff
t-theschwartz.sqlite

View File

@ -1,4 +1,6 @@
[PerlTidy]
select = .github/workflows/*.pl
select = bin/worker/dw-*
select = bin/worker/paidstatus
select = {bin,cgi-bin,t}/**/*.{pl,pm,t}
argv = -ole=unix -ci=4 -l=100

View File

@ -45,6 +45,10 @@ licensed under the terms indicated in each file, specifically:
modify it under the same terms as Perl itself. For a copy of the
license, please reference 'perldoc perlartistic' or 'perldoc perlgpl'.
Finally, files in ext/dw-nonfree are not licensed for use or
distribution. They are provided as examples only as they are the
Dreamwidth Studios branding and custom code.
--------
This program is distributed in the hope that it will be useful,

View File

@ -1,4 +1,4 @@
![build status](https://travis-ci.com/dreamwidth/dw-free.svg?branch=develop)
# Dreamwidth
Please see the `LICENSE` file for the license of this code. Note that all code
committed to this repository MUST be licensed under the GPL and have proper
@ -7,14 +7,6 @@ copyright notices tagged at the top of the file.
For more information on how to use this software, please harass someone to
actually write out documentation here. :-)
If you just want to get started with a fresh installation, you can get things
rolling along:
perl bin/bootstrap.pl
This will check out the various repositories that we use and put them in the
appropriate place.
Please [see our wiki for more information](http://wiki.dreamwidth.net/).
Thanks!

1
api/README.md Normal file
View File

@ -0,0 +1 @@
This folder contains the YAML files used to generate and validate [OpenAPI](https://openapis.org) routes for Dreamwidth, and to build the spec file supplied to end users. `src` contains the files you should edit - reusable components should go into `src/components/` and can then be referenced using JSON Schema reference notation (eg, `$ref: components/schemas/username.yaml`). This cuts down on items that need to be retyped, and keeps descriptions of items consistent across different endpoints. `dist` contains the compiled YAML files that are used by the Perl endpoint controllers. Because YAML has no mechanism for file includes, there is unfortunately still a manual step required to rebuild the `dist` files when the `src` files are changed. First install node and then the necessary packages (`npm install` from inside this folder), and then run the `build.js` file (`node build.js`). This will compile the YAML files, and print any errors encountered along the way to the terminal.

52
api/build.js Normal file
View File

@ -0,0 +1,52 @@
const $RefParser = require("@apidevtools/json-schema-ref-parser");
const YAML = require('yaml');
const fs = require("fs");
const path = require("path");

// Recursively yield every regular file beneath `dir`, depth-first.
async function* walk(dir) {
    for await (const d of await fs.promises.opendir(dir)) {
        const entry = path.join(dir, d.name);
        if (d.isDirectory()) yield* walk(entry);
        else if (d.isFile()) yield entry;
    }
}

// Dereference every YAML schema under src/ (inlining all $ref includes)
// and write the compiled result to the mirrored path under dist/.
async function main() {
    // mkdirSync with `recursive` replaces the original fire-and-forget
    // async mkdir callbacks: the directory is guaranteed to exist before
    // any write below, and an existing directory is not an error.
    fs.mkdirSync("dist/", { recursive: true });

    for await (const srcPath of walk('src/')) {
        const outPath = srcPath.replace('src/', 'dist/');
        try {
            // Without a callback, dereference() returns a promise; awaiting
            // it keeps each error associated with the file that caused it.
            const schema = await $RefParser.dereference(srcPath);
            fs.mkdirSync(path.dirname(outPath), { recursive: true });
            await fs.promises.writeFile(outPath, YAML.stringify(schema));
        } catch (err) {
            // Report the offending file and keep going, so one bad schema
            // doesn't hide errors in the rest — but fail the build overall.
            console.log(srcPath);
            console.error(err);
            process.exitCode = 1;
        }
    }
}

main();

7
api/dist/comments/screening.yaml vendored Normal file
View File

@ -0,0 +1,7 @@
paths:
/comments/screening:
get:
description: Returns descriptions of all possible comment screening options.
responses:
"200":
description: A list of comment screening options and their descriptions.

8
api/dist/components/error.yaml vendored Normal file
View File

@ -0,0 +1,8 @@
type: object
properties:
error:
type: string
description: A description of the error encountered.
example: "Bad format for username. Errors: String is too long: 77/25."
success:
type: number

12
api/dist/components/errors/400.yaml vendored Normal file
View File

@ -0,0 +1,12 @@
description: Bad or missing request parameters.
content:
application/json:
schema:
type: object
properties:
error:
type: string
description: A description of the error encountered.
example: "Bad format for username. Errors: String is too long: 77/25."
success:
type: number

View File

@ -0,0 +1,12 @@
description: Username specified does not exist.
content:
application/json:
schema:
type: object
properties:
error:
type: string
description: A description of the error encountered.
example: "Bad format for username. Errors: String is too long: 77/25."
success:
type: number

21
api/dist/components/schemas/icon.yaml vendored Normal file
View File

@ -0,0 +1,21 @@
type: object
required:
- comment
- picid
- username
- url
- keywords
properties:
comment:
type: string
picid:
type: integer
username:
type: string
description: The name of the journal this icon belongs to.
url:
type: string
keywords:
type: array
items:
type: string

View File

@ -0,0 +1,5 @@
type: string
minLength: 3
maxLength: 25
pattern: ^[0-9A-Za-z_]+$
example: example

65
api/dist/icons.yaml vendored Normal file
View File

@ -0,0 +1,65 @@
paths:
"/users/{username}/icons/{picid}":
parameters:
- name: username
in: path
description: The username you want icon information for
required: true
schema:
type: string
minLength: 3
maxLength: 25
pattern: ^[0-9A-Za-z_]+$
example: example
- name: picid
in: path
description: The picid you want information for.
required: true
schema:
type: integer
get:
description: Returns a single icon for a specified picid and username
responses:
"200":
description: An icon with its information
content:
application/json:
schema:
type: object
required:
- comment
- picid
- username
- url
- keywords
properties:
comment:
type: string
picid:
type: integer
username:
type: string
description: The name of the journal this icon belongs to.
url:
type: string
keywords:
type: array
items:
type: string
"400":
description: Bad or missing request parameters.
content:
application/json:
schema:
&a1
type: object
properties:
error:
type: string
description: A description of the error encountered.
example: "Bad format for username. Errors: String is too long: 77/25."
success:
type: number
"404":
description: No such username or icon.
schema: *a1

57
api/dist/icons_all.yaml vendored Normal file
View File

@ -0,0 +1,57 @@
paths:
"/users/{username}/icons":
parameters:
- name: username
in: path
description: The username you want icon information for
required: true
schema:
type: string
minLength: 3
maxLength: 25
pattern: ^[0-9A-Za-z_]+$
example: example
get:
description: Returns all icons for a specified username.
responses:
"200":
description: a list of icons
content:
application/json:
schema:
type: array
items:
type: object
required:
- comment
- picid
- username
- url
- keywords
properties:
comment:
type: string
picid:
type: integer
username:
type: string
description: The name of the journal this icon belongs to.
url:
type: string
keywords:
type: array
items:
type: string
"404":
description: Username specified does not exist.
content:
application/json:
schema:
type: object
properties:
error:
type: string
description: A description of the error encountered.
example: "Bad format for username. Errors: String is too long: 77/25."
success:
type: number

7
api/dist/spec.yaml vendored Normal file
View File

@ -0,0 +1,7 @@
paths:
/spec:
get:
description: Returns the API specification
responses:
"200":
description: This API specification!

View File

@ -1,40 +0,0 @@
---
paths:
/users/{username}/icons/{picid}:
parameters:
- name: username
in: path
description: The username you want icon information for
required: true
schema:
type: string
- name: picid
in: path
description: The picid you want information for.
required: true
schema:
type: integer
get:
description: Returns a single icon for a specified picid and username
responses:
200:
description: An icon with its information
content:
application/json:
schema:
type: object
properties:
comment:
type: string
picid:
type: integer
username:
type: string
url:
type: string
keywords:
type: array
items:
type: string
404:
description: No such icon.

View File

@ -1,40 +0,0 @@
---
paths:
/users/{username}/icons:
parameters:
- name: username
in: path
description: The username you want icon information for
required: true
schema:
type: string
get:
description: Returns all icons for a specified username.
responses:
200:
description: a list of icons
content:
application/json:
schema:
type: array
items:
type: object
required:
- comment
- picid
- username
- url
- keywords
properties:
comment:
type: string
picid:
type: integer
username:
type: string
url:
type: string
keywords:
type: array
items:
type: string

112
api/package-lock.json generated Normal file
View File

@ -0,0 +1,112 @@
{
"name": "dreamwidth",
"version": "1.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "dreamwidth",
"version": "1.0.0",
"dependencies": {
"@apidevtools/json-schema-ref-parser": "^9.0.9",
"yaml": "^2.1.3"
}
},
"node_modules/@apidevtools/json-schema-ref-parser": {
"version": "9.0.9",
"resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.0.9.tgz",
"integrity": "sha512-GBD2Le9w2+lVFoc4vswGI/TjkNIZSVp7+9xPf+X3uidBfWnAeUWmquteSyt0+VCrhNMWj/FTABISQrD3Z/YA+w==",
"dependencies": {
"@jsdevtools/ono": "^7.1.3",
"@types/json-schema": "^7.0.6",
"call-me-maybe": "^1.0.1",
"js-yaml": "^4.1.0"
}
},
"node_modules/@jsdevtools/ono": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz",
"integrity": "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg=="
},
"node_modules/@types/json-schema": {
"version": "7.0.11",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz",
"integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ=="
},
"node_modules/argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
},
"node_modules/call-me-maybe": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.2.tgz",
"integrity": "sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ=="
},
"node_modules/js-yaml": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"dependencies": {
"argparse": "^2.0.1"
},
"bin": {
"js-yaml": "bin/js-yaml.js"
}
},
"node_modules/yaml": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.1.3.tgz",
"integrity": "sha512-AacA8nRULjKMX2DvWvOAdBZMOfQlypSFkjcOcu9FalllIDJ1kvlREzcdIZmidQUqqeMv7jorHjq2HlLv/+c2lg==",
"engines": {
"node": ">= 14"
}
}
},
"dependencies": {
"@apidevtools/json-schema-ref-parser": {
"version": "9.0.9",
"resolved": "https://registry.npmjs.org/@apidevtools/json-schema-ref-parser/-/json-schema-ref-parser-9.0.9.tgz",
"integrity": "sha512-GBD2Le9w2+lVFoc4vswGI/TjkNIZSVp7+9xPf+X3uidBfWnAeUWmquteSyt0+VCrhNMWj/FTABISQrD3Z/YA+w==",
"requires": {
"@jsdevtools/ono": "^7.1.3",
"@types/json-schema": "^7.0.6",
"call-me-maybe": "^1.0.1",
"js-yaml": "^4.1.0"
}
},
"@jsdevtools/ono": {
"version": "7.1.3",
"resolved": "https://registry.npmjs.org/@jsdevtools/ono/-/ono-7.1.3.tgz",
"integrity": "sha512-4JQNk+3mVzK3xh2rqd6RB4J46qUR19azEHBneZyTZM+c456qOrbbM/5xcR8huNCCcbVt7+UmizG6GuUvPvKUYg=="
},
"@types/json-schema": {
"version": "7.0.11",
"resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.11.tgz",
"integrity": "sha512-wOuvG1SN4Us4rez+tylwwwCV1psiNVOkJeM3AUWUNWg/jDQY2+HE/444y5gc+jBmRqASOm2Oeh5c1axHobwRKQ=="
},
"argparse": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz",
"integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q=="
},
"call-me-maybe": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-me-maybe/-/call-me-maybe-1.0.2.tgz",
"integrity": "sha512-HpX65o1Hnr9HH25ojC1YGs7HCQLq0GCOibSaWER0eNpgJ/Z1MZv2mTc7+xh6WOPxbRVcmgbv4hGU+uSQ/2xFZQ=="
},
"js-yaml": {
"version": "4.1.0",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz",
"integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==",
"requires": {
"argparse": "^2.0.1"
}
},
"yaml": {
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/yaml/-/yaml-2.1.3.tgz",
"integrity": "sha512-AacA8nRULjKMX2DvWvOAdBZMOfQlypSFkjcOcu9FalllIDJ1kvlREzcdIZmidQUqqeMv7jorHjq2HlLv/+c2lg=="
}
}
}

19
api/package.json Normal file
View File

@ -0,0 +1,19 @@
{
"dependencies": {
"@apidevtools/json-schema-ref-parser": "^9.0.9",
"yaml": "^2.1.3"
},
"name": "dreamwidth",
"version": "1.0.0",
"main": "index.js",
"scripts": {
"test": "echo \"Error: no test specified\" && exit 1",
"build:json": "boats -i ./src/index.yml.njk -o ./build/${npm_package_name}.json",
"build:yaml": "boats -i ./src/index.yml.njk -o ./build/${npm_package_name}.yml",
"build": "npm run build:json && npm run build:yaml"
},
"keywords": [],
"author": "",
"description": "",
"private": true
}

View File

@ -0,0 +1,8 @@
type: object
properties:
error:
type: string
description: A description of the error encountered.
example: "Bad format for username. Errors: String is too long: 77/25."
success:
type: number

View File

@ -0,0 +1,5 @@
description: Bad or missing request parameters.
content:
application/json:
schema:
$ref: ../error.yaml

View File

@ -0,0 +1,5 @@
description: Username specified does not exist.
content:
application/json:
schema:
$ref: ../error.yaml

View File

@ -0,0 +1,21 @@
type: object
required:
- comment
- picid
- username
- url
- keywords
properties:
comment:
type: string
picid:
type: integer
username:
type: string
description: The name of the journal this icon belongs to.
url:
type: string
keywords:
type: array
items:
type: string

View File

@ -0,0 +1,5 @@
type: string
minLength: 3
maxLength: 25
pattern: "^[0-9A-Za-z_]+$"
example: example

31
api/src/icons.yaml Normal file
View File

@ -0,0 +1,31 @@
---
paths:
/users/{username}/icons/{picid}:
parameters:
- name: username
in: path
description: The username you want icon information for
required: true
schema:
$ref: components/schemas/username.yaml
- name: picid
in: path
description: The picid you want information for.
required: true
schema:
type: integer
get:
description: Returns a single icon for a specified picid and username
responses:
200:
description: An icon with its information
content:
application/json:
schema:
$ref: components/schemas/icon.yaml
404:
description: No such username or icon.
schema:
$ref: components/error.yaml
400:
$ref: components/errors/400.yaml

23
api/src/icons_all.yaml Normal file
View File

@ -0,0 +1,23 @@
---
paths:
/users/{username}/icons:
parameters:
- name: username
in: path
description: The username you want icon information for
required: true
schema:
$ref: components/schemas/username.yaml
get:
description: Returns all icons for a specified username.
responses:
200:
description: a list of icons
content:
application/json:
schema:
type: array
items:
$ref: components/schemas/icon.yaml
404:
$ref: components/errors/404-user.yaml

View File

@ -1,98 +0,0 @@
#!/usr/bin/perl
#
# bootstrap.pl -- clone dw-free (and optionally dw-nonfree) into $LJHOME
# and configure the canonical dreamwidth repositories as git upstreams.

use warnings;
use strict;
use 5.010;

use Getopt::Long;

# first, try to determine the user's github username: see if they gave a
# --github-user arg, or if the env var GITHUB_USER is set
my $GITHUB_USER;
my $DW_NONFREE;

GetOptions(
    'github-user=s' => \$GITHUB_USER,
    'dw-nonfree!'   => \$DW_NONFREE,
);

# environment fallback applies only when the flag wasn't supplied
$GITHUB_USER //= $ENV{GITHUB_USER} if exists $ENV{GITHUB_USER};

die "Can't find your github username! " . "Try bootstrap.pl --github-user <username>\n"
    unless defined $GITHUB_USER;

# github https user url: eg https://rahaeli@github.com/rahaeli
my $github_user_url = "https://$GITHUB_USER\@github.com/$GITHUB_USER";

# see if we can reach a git executable
system( 'bash', '-c', 'type git' );
die "I can't find git on your system -- is it installed?" unless $? == 0;

# see if LJHOME is defined, if it's present, and if we can go there
my $LJHOME = $ENV{LJHOME};
die "Must set the \$LJHOME environment variable before running this.\n"
    unless defined $LJHOME;
mkdir $LJHOME unless -d $LJHOME;
chdir($LJHOME) or die "Couldn't chdir to \$LJHOME directory.\n";

# a .git dir in $LJHOME means dw-free is checked out. otherwise, get it
if ( -d '.git' ) {
    say "Looks like you already have dw-free checked out; skipping.";
}
else {
    say "Checking out dw-free to $LJHOME";
    say "Please enter the github password for $GITHUB_USER";
    git( 'clone', $github_user_url . '/dw-free.git', $LJHOME );
    configure_dw_upstream('dw-free');
}

# now get dw-nonfree if it's not there *and* the user has asked for it
if ( -d "$LJHOME/ext/dw-nonfree/.git" ) {
    say "Looks like you already have dw-nonfree checked out; skipping.";
}
elsif ($DW_NONFREE) {
    say "Checking out dw-nonfree to $LJHOME/ext";
    say "Please use dw-nonfree for dreamwidth.org development only.";
    say "See $LJHOME/ext/dw-nonfree/README for details.";
    chdir("$LJHOME/ext") or die "Couldn't chdir to ext directory.\n";
    say "Please enter the github password for $GITHUB_USER";
    git( 'clone', $github_user_url . '/dw-nonfree.git' );
    chdir("$LJHOME/ext/dw-nonfree")
        or die "Couldn't chdir to dw-nonfree directory.\n";
    configure_dw_upstream('dw-nonfree');
}
else {
    say "dw-nonfree not installed since it wasn't requested.";
    say "If you are developing for dreamwidth.org, you can install";
    say "the Dreamwidth-specific items in dw-nonfree by running this";
    say "command again with the option --dw-nonfree:";
    say "    perl bootstrap.pl --github-user <username> --dw-nonfree";
}

# a little syntactic sugar: run a git command, dying on any failure
sub git {
    system( 'git', @_ );
    die "failure trying to run: git @_: $!\n" unless $? == 0;
}

# Point the named repo's develop/master branches at the canonical
# dreamwidth remote, so local branches track the official upstream
# rather than the user's fork.
sub configure_dw_upstream {
    my ($repo) = @_;
    say "Configuring dreamwidth's $repo as the upstream of your $repo.";
    my $dw_repo_url = "https://github.com/dreamwidth/$repo";
    git( qw{remote add dreamwidth}, $dw_repo_url );
    git(qw{fetch dreamwidth});
    git(qw{branch --set-upstream develop dreamwidth/develop});
    git(qw{branch --set-upstream master dreamwidth/master});
}

# finished :-)
say "Done! You probably want to set up the MySQL database next:";
say "http://wiki.dreamwidth.net/notes/Dreamwidth_Scratch_Installation#Database_setup";

View File

@ -5,9 +5,27 @@
# 'perldoc perlartistic' or 'perldoc perlgpl'.
force="--force"
while getopts ":n" opt; do
case ${opt} in
n )
force=""
;;
\? )
echo "$0: illegal option -- $OPTARG" 1>&2
exit 1
;;
esac
done
buildroot="$LJHOME/build/static"
mkdir -p $buildroot
if [[ -e /tmp/jcompress ]]; then
rm /tmp/jcompress
fi
compressor="$LJHOME/ext/yuicompressor/yuicompressor.jar"
uncompressed_dir="/max"
if [ ! -e $compressor ]
@ -25,10 +43,10 @@ if [ "$compass" != "" ]; then
if [ $compass_version_ok ]; then
echo "* Building SCSS..."
cd $LJHOME
$compass compile -e production --force
$compass compile -e production $force
if [ -d "$LJHOME/ext/dw-nonfree" ]; then
cd $LJHOME/ext/dw-nonfree
$compass compile -e production --force
$compass compile -e production $force
fi
else
echo "Compass version must be 1.0 or higher. Please upgrade."
@ -86,10 +104,11 @@ do
fi
mkdir -p "$final/$dir"
cp -p "$synced_file" "$final/$modified_file"
if [[ "$ext" = "js" || "$ext" = "css" ]]; then
java -jar $compressor "$synced_file" -o "$final/$modified_file"
else
cp -p "$synced_file" "$final/$modified_file"
# Attempt to rewrite the file with compressed version
echo "java -jar $compressor \"$synced_file\" -o \"$final/$modified_file\"" >> /tmp/jcompress
fi
else
# we're deleting rather than copying
@ -102,6 +121,12 @@ do
fi
fi
done
# Now parallel execute
if [[ -e /tmp/jcompress ]]; then
echo "Executing compression (takes a minute)..."
cat /tmp/jcompress | xargs -d "\n" -n 1 -P 4 -- bash -c
fi
done
if [[ -n $compressor ]]; then
@ -110,3 +135,4 @@ if [[ -n $compressor ]]; then
find $buildroot/stc $buildroot/max/stc | sed "s/$escaped\/\(max\/\)\?//" | sort | uniq -c | sort -n | grep '^[[:space:]]\+1'
fi
exit 0

View File

@ -176,7 +176,7 @@ sub check_env {
my $file = $_[0];
my $out = "$tempdir/out";
my $err = "$tempdir/err";
system qq($^X -c $file > $out 2>$err);
system qq($^X -I"$ENV{LJHOME}/extlib/lib/perl5" -c $file > $out 2>$err);
my $err_data = $slurp->($err);
return 1 if $err_data && $err_data eq "$file syntax OK\n";
};

View File

@ -0,0 +1,43 @@
#!/usr/bin/perl
#
# dump-profile-editors.pl -- Read and reset the profile_editors key from memcache
#
# Copyright (c) 2022 by Dreamwidth Studios, LLC.
#
# This program is free software; you may redistribute it and/or modify it under
# the same terms as Perl itself. For a copy of the license, please reference
# 'perldoc perlartistic' or 'perldoc perlgpl'.

use strict;
use warnings;

use Getopt::Long;

# parse input options: --readonly dumps the key without resetting it
my $ro;
GetOptions( 'readonly' => \$ro );

# now load in the beast
BEGIN {
    require "$ENV{'LJHOME'}/cgi-bin/ljlib.pl";
}

use LJ::MemCache;

# The memcache value is an arrayref of userids; treat a missing key as empty.
my $memval = LJ::MemCache::get('profile_editors') // [];

# Reset the key unless the caller asked for a read-only dump.
LJ::MemCache::delete('profile_editors') unless $ro;

my $us    = LJ::load_userids(@$memval);
my @users = sort { $a->user cmp $b->user } values %$us;

foreach my $u (@users) {
    next unless $u && $u->is_visible;

    # url/urlname can be undef; default to '' so index() and the print
    # below don't emit uninitialized-value warnings.
    my $url     = $u->url // '';
    my $urlname = $u->prop('urlname') // '';

    # only report users whose URL or urlname contains a dot
    # (i.e. looks like it might carry a link)
    next if index( $url, '.' ) == -1 && index( $urlname, '.' ) == -1;

    my $timecreate = scalar localtime( $u->timecreate );
    my $user       = $u->user;
    print "$user\t$timecreate\t$url\t$urlname\n";
}

7
bin/tidyall Executable file
View File

@ -0,0 +1,7 @@
#!/bin/bash
# Run Code::TidyAll (-a: all files) over the main checkout and the
# dw-nonfree extension, using the Perl modules vendored under extlib/.

# Fail loudly if the checkout root isn't configured, instead of
# cd-ing to "" and tidying whatever the current directory happens to be.
: "${LJHOME:?LJHOME must be set to the Dreamwidth checkout root}"

# $1: directory to tidy. Guarding cd keeps us from running tidyall
# against the wrong tree when the directory is missing.
run_tidyall() {
    cd "$1" || exit 1
    perl -I"$LJHOME/extlib/lib/perl5" "$LJHOME/extlib/bin/tidyall" -a
}

run_tidyall "$LJHOME"
run_tidyall "$LJHOME/ext/dw-nonfree"

View File

@ -936,6 +936,43 @@ general /tools/endpoints/extacct_auth.bml.error.nouser
general /tools/memadd.bml.body.added.body
general /tools/tellafriend.bml.email.body
general /tools/tellafriend.bml.email.body.boxtitle
general /tools/tellafriend.bml.email.body.custom
general /tools/tellafriend.bml.email.body.footer1
general /tools/tellafriend.bml.email.body.otherjournal
general /tools/tellafriend.bml.email.body.yourjournal
general /tools/tellafriend.bml.email.formatinfo
general /tools/tellafriend.bml.email.fromfield
general /tools/tellafriend.bml.email.recipientfield
general /tools/tellafriend.bml.email.sharedentry.title
general /tools/tellafriend.bml.email.sharedentry.url
general /tools/tellafriend.bml.email.subject.entryhassubject
general /tools/tellafriend.bml.email.subject.entrynosubject
general /tools/tellafriend.bml.email.subject.journal
general /tools/tellafriend.bml.email.subject.noentry
general /tools/tellafriend.bml.email.usemask.footer
general /tools/tellafriend.bml.email.warning.otherpublic
general /tools/tellafriend.bml.email.warning.private
general /tools/tellafriend.bml.email.warning.usemask
general /tools/tellafriend.bml.error.characterlimit
general /tools/tellafriend.bml.error.disabled
general /tools/tellafriend.bml.error.forbiddenimages
general /tools/tellafriend.bml.error.forbiddenurl
general /tools/tellafriend.bml.error.maximumemails
general /tools/tellafriend.bml.error.noemail
general /tools/tellafriend.bml.error.unknownjournal
general /tools/tellafriend.bml.errorpage.body
general /tools/tellafriend.bml.errorpage.title
general /tools/tellafriend.bml.invalidemailpage.body
general /tools/tellafriend.bml.invalidemailpage.title
general /tools/tellafriend.bml.sendbutton
general /tools/tellafriend.bml.sentpage.body.mailedlist
general /tools/tellafriend.bml.sentpage.body.tellanother
general /tools/tellafriend.bml.sentpage.title
general /tools/tellafriend.bml.title
general /tools/tellafriend.bml.via
general /update.bml.update.about
general /update.bml.login.success
general /update.bml.htmlokay

View File

@ -3234,6 +3234,8 @@ Regards,
The [[sitename]] Team
.
shop.email.acct.body.expires=Your new paid time expiration date is: [[date]]
shop.email.acct.body.note<<
@ -5349,8 +5351,12 @@ widget.shopcart.paymentmethod=Select a Payment Method:
widget.shopcart.paymentmethod.checkmoneyorder=Check/Money Order
widget.shopcart.paymentmethod.checkmoneyorder.whydisabled=Sorry, the "Check/Money Order" payment option is disabled because the cash amount of the cart is less than [[minimum]].
widget.shopcart.paymentmethod.creditcard=Credit Card
widget.shopcart.paymentmethod.creditcard.whydisabled=Sorry, the "Credit Card" payment option is disabled because the site owner has not configured a payment processor.
widget.shopcart.paymentmethod.creditcardpp=Credit Card
widget.shopcart.paymentmethod.free=Free/No Cost Order

View File

@ -441,10 +441,10 @@ userproplist.github:
userproplist.google_talk:
cldversion: 0
datatype: char
des: Google's 'Hangouts' Service address
des: Google Chat Service address
indexed: 1
multihomed: 1
prettyname: Google Hangouts Address
prettyname: Google Chat Address
userproplist.google_analytics:
cldversion: 4

View File

@ -942,7 +942,6 @@ CREATE TABLE userusage (
PRIMARY KEY (userid),
timecreate DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
timeupdate DATETIME,
timecheck DATETIME,
lastitemid INT UNSIGNED NOT NULL DEFAULT '0',
INDEX (timeupdate)
@ -952,8 +951,8 @@ EOC
post_create(
"userusage",
"sqltry" =>
"INSERT IGNORE INTO userusage (userid, timecreate, timeupdate, timecheck, lastitemid) SELECT userid, timecreate, timeupdate, timecheck, lastitemid FROM user",
"sqltry" => "ALTER TABLE user DROP timecreate, DROP timeupdate, DROP timecheck, DROP lastitemid"
"INSERT IGNORE INTO userusage (userid, timecreate, timeupdate, lastitemid) SELECT userid, timecreate, timeupdate, lastitemid FROM user",
"sqltry" => "ALTER TABLE user DROP timecreate, DROP timeupdate, DROP lastitemid"
);
register_tablecreate( "acctcode", <<'EOC');
@ -2852,7 +2851,7 @@ CREATE TABLE sitekeywords (
)
EOC
# this table is included, even though it's not used in the stock dw-free
# this table is included, even though it's not used in the stock Dreamwidth
# installation. but if you want to use it, you can, or you can ignore it
# and make your own version, which you might have to do.
register_tablecreate( 'cc_trans', <<'EOC');
@ -4212,6 +4211,11 @@ q{INSERT INTO media_versions (userid, mediaid, versionid, width, height, filesiz
do_alter( 'subs',
'ALTER TABLE subs MODIFY COLUMN flags SMALLINT UNSIGNED NOT NULL DEFAULT 0' );
}
if ( column_type( 'userusage', 'timecheck' ) ) {
do_alter( 'user',
'ALTER TABLE userusage DROP COLUMN timecheck, ALGORITHM=INPLACE, LOCK=NONE' );
}
}
);

View File

@ -207,5 +207,5 @@ sub timestr {
sub github_url {
my $hash = substr( $_[0], 0, 8 );
return qq|[$hash](https://github.com/dreamwidth/dw-free/commit/$hash)|;
return qq|[$hash](https://github.com/dreamwidth/dreamwidth/commit/$hash)|;
}

View File

@ -16,6 +16,7 @@
#
use strict;
BEGIN {
require "$ENV{LJHOME}/cgi-bin/ljlib.pl";
}
@ -27,26 +28,24 @@ use DW::Shop;
use DW::Shop::Cart;
use DW::Pay;
################################################################################
## main setup
################################################################################
# setup logging routine
my $begin_time = [ gettimeofday() ];
my ( $logfile, $last_log_time );
my $log = sub {
$last_log_time ||= [ gettimeofday() ];
unless ( $logfile ) {
unless ($logfile) {
open $logfile, ">>$LJ::HOME/logs/paidstatus.log"
or die "Internal server error creating log.\n";
print $logfile "[0.00s 0.00s] Log started at " . LJ::mysql_time( gmtime() ) . ".\n";
}
my $fmt = "[%0.4fs %0.1fs] " . shift() . "\n";
my $msg = sprintf( $fmt, tv_interval( $last_log_time ), tv_interval( $begin_time ), @_ );
my $msg = sprintf( $fmt, tv_interval($last_log_time), tv_interval($begin_time), @_ );
# now log to both the file and STDERR if we're foregrounded
print $logfile $msg;
@ -59,57 +58,60 @@ my $log = sub {
my $alert = sub {
LJ::send_mail(
{
to => $LJ::PAYPAL_CONFIG{email},
from => $LJ::BOGUS_EMAIL,
to => $LJ::PAYPAL_CONFIG{email},
from => $LJ::BOGUS_EMAIL,
subject => "$LJ::SITENAME Payment System Alert",
body => shift(),
body => shift(),
}
);
return undef;
};
while ( 1 ) {
$log->( 'Main loop beginning...' );
while (1) {
$log->('Main loop beginning...');
# do this in a sub so it can return on error
main_loop();
# now we sleep to the next one minute boundary, and if we're taking more
# than one minute to run, we fire off an alert
my $sleep_time = 10 - tv_interval( $begin_time );
my $sleep_time = 10 - tv_interval($begin_time);
if ( $sleep_time <= 0 ) {
$alert->( 'Warning: main loop is taking longer than a minute.' );
$alert->('Warning: main loop is taking longer than a minute.');
$sleep_time = 10;
}
$log->( 'Sleeping for %0.2f seconds.', $sleep_time );
select undef, undef, undef, $sleep_time;
$log->( 'Main loop ended.' );
$log->('Main loop ended.');
$begin_time = [ gettimeofday() ];
}
################################################################################
## main loop
################################################################################
sub main_loop {
# disconnect dbs
LJ::DB::disconnect_dbs();
LJ::start_request();
# now get a db or die
my $dbh = LJ::get_db_writer()
or return $log->( 'Unable to get database writer handle.' );
or return $log->('Unable to get database writer handle.');
## PHASE 0) REMOVE DEAD CARTS (unused for more than 30 days)
## PHASE 0) REMOVE DEAD CARTS (open or closed for more than 30 days)
my $ct = $dbh->do( 'UPDATE shop_carts SET state = ? WHERE state = ? AND starttime < UNIX_TIMESTAMP() - 86400 * 30',
undef, $DW::Shop::STATE_CLOSED, $DW::Shop::STATE_OPEN );
my $ct = $dbh->do(
q{DELETE FROM shop_carts
WHERE state IN (?, ?) AND starttime < UNIX_TIMESTAMP() - 86400 * 30
LIMIT 1000},
undef, $DW::Shop::STATE_CLOSED, $DW::Shop::STATE_OPEN
);
return $log->( 'Database error cleaning carts: %s', $dbh->errstr )
if $dbh->err;
$log->( 'Cleaned %d carts that were unused for more than 30 days.', $ct+0 );
$log->( 'Cleaned %d carts that were unused for more than 30 days.', $ct + 0 );
DW::Stats::increment( 'dw.shop.cart.expired', $ct );
## PHASE 1) PROCESS PAYMENTS
@ -117,14 +119,13 @@ sub main_loop {
# dig up carts that are in state paid and scannable
my $cartids = $dbh->selectcol_arrayref(
q{SELECT cartid FROM shop_carts WHERE state = ? AND nextscan < UNIX_TIMESTAMP()},
undef, $DW::Shop::STATE_PAID
);
undef, $DW::Shop::STATE_PAID );
return $log->( 'Database error: %s', $dbh->errstr )
if $dbh->err;
return $log->( 'Invalid response looking for scannable carts.' )
return $log->('Invalid response looking for scannable carts.')
unless $cartids && ref $cartids eq 'ARRAY';
$log->( 'Found %d scannable carts.', scalar( @$cartids ) );
$log->( 'Found %d scannable carts.', scalar(@$cartids) );
# now iterate over these and do something with them
scan_cart( $dbh, $_ ) foreach @$cartids;
@ -139,7 +140,7 @@ sub main_loop {
return $log->( 'Database error: %s', $dbh->errstr )
if $dbh->err;
$log->( 'Found %d expired users.', scalar( @$uids ) );
$log->( 'Found %d expired users.', scalar(@$uids) );
# now expire the user
expire_user( $dbh, $_ ) foreach @$uids;
@ -159,72 +160,87 @@ sub main_loop {
return $log->( 'Database error: %s', $dbh->errstr )
if $dbh->err;
$log->( 'Found %d users expiring soon.', scalar( @$rows ) );
$log->( 'Found %d users expiring soon.', scalar(@$rows) );
# now warn the user
warn_user( $dbh, $_ ) foreach @$rows;
}
sub expire_user {
my ( $dbh, $uid ) = @_;
my $u = LJ::load_userid( $uid )
my $u = LJ::load_userid($uid)
or return 0;
$log->( 'Expiring %s(%d).', $u->user, $u->id );
if ( $u->is_community && $u->is_visible ) {
# send an email to every maintainer
my $maintus = LJ::load_userids( $u->maintainer_userids );
foreach my $maintu ( values %$maintus ) {
LJ::send_mail( {
to => $maintu->email_raw,
fromname => $LJ::SITENAME,
from => $LJ::ACCOUNTS_EMAIL,
subject => LJ::Lang::ml( "shop.expiration.comm.0.subject", { sitename => $LJ::SITENAME } ),
body => LJ::Lang::ml( "shop.expiration.comm.0.body", {
touser => $maintu->display_name,
commname => $u->display_name,
shopurl => "$LJ::SITEROOT/shop/account?for=gift&user=" . $u->user,
sitename => $LJ::SITENAME,
} ),
} );
LJ::send_mail(
{
to => $maintu->email_raw,
fromname => $LJ::SITENAME,
from => $LJ::ACCOUNTS_EMAIL,
subject => LJ::Lang::ml(
"shop.expiration.comm.0.subject",
{ sitename => $LJ::SITENAME }
),
body => LJ::Lang::ml(
"shop.expiration.comm.0.body",
{
touser => $maintu->display_name,
commname => $u->display_name,
shopurl => "$LJ::SITEROOT/shop/account?for=gift&user=" . $u->user,
sitename => $LJ::SITENAME,
}
),
}
);
}
} elsif ( $u->is_visible ) {
LJ::send_mail( {
to => $u->email_raw,
fromname => $LJ::SITENAME,
from => $LJ::ACCOUNTS_EMAIL,
subject => LJ::Lang::ml( "shop.expiration.user.0.subject", { sitename => $LJ::SITENAME } ),
body => LJ::Lang::ml( "shop.expiration.user.0.body", {
touser => $u->display_name,
shopurl => "$LJ::SITEROOT/shop/account?for=self",
sitename => $LJ::SITENAME,
} ),
} );
}
elsif ( $u->is_visible ) {
LJ::send_mail(
{
to => $u->email_raw,
fromname => $LJ::SITENAME,
from => $LJ::ACCOUNTS_EMAIL,
subject =>
LJ::Lang::ml( "shop.expiration.user.0.subject", { sitename => $LJ::SITENAME } ),
body => LJ::Lang::ml(
"shop.expiration.user.0.body",
{
touser => $u->display_name,
shopurl => "$LJ::SITEROOT/shop/account?for=self",
sitename => $LJ::SITENAME,
}
),
}
);
}
# this is pretty easy, we just tell DW::Pay to do it
return DW::Pay::expire_user( $uid );
return DW::Pay::expire_user($uid);
}
sub warn_user {
my ( $dbh, $row ) = @_;
my ( $uid, $lastmail, $timeleft ) = @$row;
my $u = LJ::load_userid( $uid )
my $u = LJ::load_userid($uid)
or return 0;
return 0 unless $u->is_visible;
my $mail;
if ( $timeleft < 86400*3 && ( ! defined $lastmail || $lastmail == 14 ) ) {
if ( $timeleft < 86400 * 3 && ( !defined $lastmail || $lastmail == 14 ) ) {
$log->( 'Sending 3-day expiration mail to %s(%d).', $u->user, $u->id );
$mail = '3';
} elsif ( $timeleft < 86400*14 && ! defined $lastmail ) {
}
elsif ( $timeleft < 86400 * 14 && !defined $lastmail ) {
$log->( 'Sending 14-day expiration mail to %s(%d).', $u->user, $u->id );
$mail = '14';
}
@ -235,60 +251,74 @@ sub warn_user {
# alter warning message body for premium paid accounts
my $bodytype = $mail;
my $status = DW::Pay::get_account_type( $u );
my $status = DW::Pay::get_account_type($u);
$bodytype = "$status.$mail" if $status eq "premium";
if ( $u->is_community ) {
# send an email to every maintainer
my $maintus = LJ::load_userids( $u->maintainer_userids );
foreach my $maintu ( values %$maintus ) {
LJ::send_mail( {
to => $maintu->email_raw,
fromname => $LJ::SITENAME,
from => $LJ::ACCOUNTS_EMAIL,
subject => LJ::Lang::ml( "shop.expiration.comm.$mail.subject", { sitename => $LJ::SITENAME } ),
body => LJ::Lang::ml( "shop.expiration.comm.$bodytype.body", {
touser => $maintu->display_name,
commname => $u->display_name,
shopurl => "$LJ::SITEROOT/shop/account?for=gift&user=" . $u->user,
sitename => $LJ::SITENAME,
} ),
} );
LJ::send_mail(
{
to => $maintu->email_raw,
fromname => $LJ::SITENAME,
from => $LJ::ACCOUNTS_EMAIL,
subject => LJ::Lang::ml(
"shop.expiration.comm.$mail.subject",
{ sitename => $LJ::SITENAME }
),
body => LJ::Lang::ml(
"shop.expiration.comm.$bodytype.body",
{
touser => $maintu->display_name,
commname => $u->display_name,
shopurl => "$LJ::SITEROOT/shop/account?for=gift&user=" . $u->user,
sitename => $LJ::SITENAME,
}
),
}
);
}
} else {
LJ::send_mail( {
to => $u->email_raw,
fromname => $LJ::SITENAME,
from => $LJ::ACCOUNTS_EMAIL,
subject => LJ::Lang::ml( "shop.expiration.user.$mail.subject", { sitename => $LJ::SITENAME } ),
body => LJ::Lang::ml( "shop.expiration.user.$bodytype.body", {
touser => $u->display_name,
shopurl => "$LJ::SITEROOT/shop/account?for=self",
sitename => $LJ::SITENAME,
} ),
} );
}
else {
LJ::send_mail(
{
to => $u->email_raw,
fromname => $LJ::SITENAME,
from => $LJ::ACCOUNTS_EMAIL,
subject => LJ::Lang::ml(
"shop.expiration.user.$mail.subject",
{ sitename => $LJ::SITENAME }
),
body => LJ::Lang::ml(
"shop.expiration.user.$bodytype.body",
{
touser => $u->display_name,
shopurl => "$LJ::SITEROOT/shop/account?for=self",
sitename => $LJ::SITENAME,
}
),
}
);
}
# now update the db
$dbh->do( 'UPDATE dw_paidstatus SET lastemail = ? WHERE userid = ?',
undef, $mail+0, $u->id );
$dbh->do( 'UPDATE dw_paidstatus SET lastemail = ? WHERE userid = ?', undef, $mail + 0, $u->id );
return 0
if $dbh->err;
return 1;
}
sub scan_cart {
my $dbh = shift;
my $dbh = shift;
my $cartid = shift() + 0;
# easy sub for setting nextscan on this cart
my $nextscan = sub {
$dbh->do(
q{UPDATE shop_carts SET nextscan = UNIX_TIMESTAMP() + ? WHERE cartid = ?},
undef, shift() || 3600, $cartid
);
$dbh->do( q{UPDATE shop_carts SET nextscan = UNIX_TIMESTAMP() + ? WHERE cartid = ?},
undef, shift() || 3600, $cartid );
$log->( 'Database error: %s', $dbh->errstr )
if $dbh->err;
return 1;
@ -300,8 +330,8 @@ sub scan_cart {
my $msg = 'scan_cart(%d): ' . shift();
$msg = sprintf( $msg, $cartid, @_ );
$log->( $msg );
$alert->( $msg );
$log->($msg);
$alert->($msg);
return undef;
};
@ -313,19 +343,19 @@ sub scan_cart {
$log->( '-' x 60 );
my $cart = DW::Shop::Cart->get_from_cartid( $cartid );
return $fail->( 'Failed creating cart.' )
my $cart = DW::Shop::Cart->get_from_cartid($cartid);
return $fail->('Failed creating cart.')
unless $cart && ref $cart eq 'DW::Shop::Cart';
# error check this cart
return $fail->( 'Cart not in a valid state.' )
return $fail->('Cart not in a valid state.')
unless $cart->state == $DW::Shop::STATE_PAID;
return $fail->( 'Cart has no items.' )
return $fail->('Cart has no items.')
unless $cart->has_items;
# try to apply each item
my ( $unapplied, %saw_ids ) = ( 0 );
$log->( 'Iterating over items.' );
my ( $unapplied, %saw_ids ) = (0);
$log->('Iterating over items.');
foreach my $item ( @{ $cart->items } ) {
next unless $item->apply_automatically;
@ -333,9 +363,9 @@ sub scan_cart {
# rare case where we've found the cart generating items with the same
# id, leading to failures in sending invite codes
while ( exists $saw_ids{$item->id} ) {
while ( exists $saw_ids{ $item->id } ) {
if ( $item->applied ) {
$log->( 'Item id duplicate, but item safely applied. Ignoring dupe id.' );
$log->('Item id duplicate, but item safely applied. Ignoring dupe id.');
next;
}
@ -345,19 +375,20 @@ sub scan_cart {
}
# record the id in our list so we know we've seen it
$saw_ids{$item->id} = 1;
$saw_ids{ $item->id } = 1;
# this is the normal 'bail' point for already applied items
if ( $item->applied ) {
$log->( 'Item already applied.' );
$log->('Item already applied.');
next;
}
# try to apply it
my $rv = eval { $item->apply };
if ( $rv ) {
$log->( 'Successfully applied item.' );
} else {
if ($rv) {
$log->('Successfully applied item.');
}
else {
$log->( 'Failed to apply item: %s', DW::Pay::error_text() || $@ || 'unknown error' );
$unapplied = 1;
}
@ -368,9 +399,9 @@ sub scan_cart {
# two possible results: we have items still unapplied or we did
# get everything applied. try again in 1-2 hours.
if ( $unapplied ) {
if ($unapplied) {
$nextscan->( 3600 + int( rand() * 3600 ) );
$log->( 'One or more items not applied, will retry later.' );
$log->('One or more items not applied, will retry later.');
return;
}
@ -378,5 +409,5 @@ sub scan_cart {
$cart->state( $DW::Shop::STATE_PROCESSED, no_memcache => 1 );
# main loop done!
$log->( 'Cart->state is now PROCESSED.' );
$log->('Cart->state is now PROCESSED.');
}

View File

@ -186,9 +186,10 @@ sub send_concat_res_response {
my $args = $apache_r->args;
my $uri = $apache_r->uri;
my $dir = ( $LJ::STATDOCS // $LJ::HTDOCS ) . $uri;
my $dir = ( $LJ::STATDOCS // $LJ::HTDOCS ) . $uri;
my $maxdir = ( $LJ::STATDOCS // $LJ::HTDOCS ) . '/max' . $uri;
return 404
unless -d $dir;
unless -d $dir || -d $maxdir;
# Might contain cache buster "?v=3234234234" at the end;
# plus possibly other unique args (caught by the .*)
@ -197,7 +198,7 @@ sub send_concat_res_response {
# Collect each file
my ( $body, $size, $mtime, $mime ) = ( '', 0, 0, undef );
foreach my $file ( split /,/, substr( $args, 1 ) ) {
my $res = load_file_for_concat("$dir$file");
my $res = load_file_for_concat("$dir$file") // load_file_for_concat("$maxdir$file");
return 404
unless defined $res;
$body .= $res->[0];
@ -964,10 +965,11 @@ sub trans {
if ( $uri =~ m!^/api/v(\d+)(/.+)$! ) {
my $ver = $1 + 0;
$ret = DW::Routing->call(
api_version => $ver,
uri => "/v$ver$2",
role => 'api'
apiver => $ver,
uri => "/v$ver$2",
role => 'api'
);
$ret //= DW::Routing->call( uri => "/internal/api/404" );
return $ret if defined $ret;
}

View File

@ -89,11 +89,6 @@ sub _stupid_clean {
return;
}
if ( $reduced =~ /[\x7f-\xff]/ ) {
$$ref = "/* suspect CSS: high bytes */";
return;
}
# returns 1 if something bad was found
my $check_for_bad = sub {
if ( $reduced =~ m!<\w! ) {

View File

@ -174,4 +174,22 @@ sub hash {
return $_[0]->{keyhash};
}
# Usage: get_one ( user )
# Given a user, return the first API key found for them, or — if they have
# no keys yet — generate one. Intended for situations where we have a
# logged-in user and want a working API key for them without forcing them
# to jump through the key-management menu hoops themselves.
sub get_one {
    my ( $self, $u ) = @_;

    # get_keys_for_user returns an arrayref of key objects; use the first
    # existing key, or mint a fresh one when the user has none.
    my $apikeys = $self->get_keys_for_user($u);
    return $apikeys->[0] // $self->new_for_user($u);
}
1;

View File

@ -66,10 +66,12 @@ sub param {
# Creates a special instance of DW::API::Parameter object and
# adds it as the requestBody definition for the calling method
sub body {
my ( $self, @args ) = @_;
my $param = DW::API::Parameter->define_parameter(@args);
$self->{requestBody} = $param;
my ( $self, $config ) = @_;
$self->{requestBody}->{required} = $config->{required};
for my $ct ( keys( %{ $config->{content} } ) ) {
my $param = DW::API::Parameter->define_body( $config->{content}->{$ct}, $ct );
$self->{requestBody}{content}{$ct} = $param;
}
}
# Usage: success ( desc, schema )
@ -197,12 +199,25 @@ sub TO_JSON {
$json->{parameters} = [ values %{ $self->{params} } ];
}
if ( defined $self->{requestBody} ) {
$json->{requestBody} = $self->{requestBody};
if ( defined $self->{requestBody}{required} && $self->{requestBody}{required} ) {
$json->{requestBody}{required} = $JSON::true;
}
else {
delete $json->{requestBody}{required};
}
}
my $responses = $self->{responses};
for my $key ( keys %{ $self->{responses} } ) {
$json->{responses}{$key} = { description => $responses->{$key}{desc} };
$json->{responses}{$key}{schema} = $responses->{$key}{schema}
if defined $responses->{$key}{schema};
for my $return_type ( keys %{ $self->{responses}{$key}{content} } ) {
$json->{responses}{$key}{content}{$return_type}{schema} =
$responses->{$key}{content}{$return_type}{schema}
if defined $responses->{$key}{content}{$return_type}{schema};
}
}
return $json;

View File

@ -46,24 +46,37 @@ sub define_parameter {
}
elsif ( defined $args->{content} ) {
$parameter->{content} = $args->{content};
$parameter->{in} = 'requestBody';
}
bless $parameter, $class;
$parameter->_validate;
$parameter->_validate_json;
return $parameter;
}
# Usage: define_body ( args_hashref, content_type )
# Builds a DW::API::Parameter object representing a requestBody definition
# for a single content type, copying the schema (if any) from $args.
# NOTE(review): only 'application/json' bodies are validated and returned;
# for every other content type this sub falls off the end and returns
# undef / the empty list. Confirm callers expect that, since body() in
# DW::API::Method stores the return value for *every* content type.
sub define_body {
    my ( $class, $args, $content ) = @_;
    # Body parameters always live in the requestBody location.
    my $parameter = { in => 'requestBody', };
    if ( defined $args->{schema} ) {
        $parameter->{schema} = $args->{schema};
    }
    bless $parameter, $class;
    # JSON bodies get schema validation before being handed back.
    if ( $content eq 'application/json' ) {
        $parameter->_validate_json;
        return $parameter;
    }
}
# Usage: validate ( Parameter object )
# Does some simple validation checks for parameter objects
# Makes sure required fields are present, and that the
# location given is a valid one.
sub _validate {
sub _validate_json {
my $self = $_[0];
for my $field (@REQ_ATTRIBUTES) {
croak "$self is missing required field $field" unless defined $self->{$field};
}
my $location = $self->{in};
croak "$location isn't a valid parameter location" unless grep( $location, @LOCATIONS );
@ -73,15 +86,8 @@ sub _validate {
croak "Can only define one of content or schema!" if $has_schema && $has_content;
croak "Must define at least one of content or schema!" unless $has_content || $has_schema;
# requestBody is a special instance of Parameter and has stricter rules
if ( $location eq "requestBody" ) {
if ( not defined( keys %{ $self->{content} } ) ) {
croak "requestBody must have at least one content-type!";
}
}
# Run schema validators
DW::Controller::API::REST::schema($self) if ( defined $self->{schema} );
DW::Controller::API::REST::schema($self) if defined $self->{schema};
if ( defined $self->{content} ) {
for my $content_type ( keys %{ $self->{content} } ) {
@ -102,8 +108,11 @@ sub TO_JSON {
in => $self->{in},
};
# Schema fields we need to force to be numeric
if ( defined $self->{schema} ) {
$json->{schema} = $self->{schema};
force_numeric( $json->{schema} );
}
elsif ( defined $self->{content} ) {
$json->{content} = $self->{content};
@ -111,13 +120,41 @@ sub TO_JSON {
# content type is just a hash, but we don't want to print the validator too
for my $content_type ( keys %{ $json->{content} } ) {
delete $json->{content}->{$content_type}{validator};
force_numeric( $json->{content}->{$content_type}{schema} );
}
}
if ( $self->{in} eq "requestBody" ) {
#remove some fields that the requestBody definition doesn't need
delete $json->{in};
delete $json->{name};
delete $json->{description};
}
$json->{required} = $JSON::true if defined $self->{required} && $self->{required};
return $json;
}
# Usage: force_numeric ( schema_hashref )
# Schemas loaded from YAML may carry numeric constraint fields as strings;
# recursively walk the schema and coerce the known numeric constraint
# fields back to numbers so they serialize as numbers in the JSON spec.
sub force_numeric {
    my ($schema) = @_;

    # Guard against undef / non-hash nodes (e.g. an 'array' schema with no
    # 'items' defined) instead of crashing on a method-less dereference.
    return unless ref $schema eq 'HASH';

    my @numeric_fields = ( 'minLength', 'maxLength', 'minimum', 'maximum', 'minItems', 'maxItems' );

    # Default to '' so schemas without an explicit type don't trigger
    # uninitialized-value warnings; they fall through to the scalar case.
    my $type = $schema->{type} // '';

    if ( $type eq 'object' ) {
        force_numeric( $schema->{properties}{$_} ) for keys %{ $schema->{properties} };
    }
    elsif ( $type eq 'array' ) {
        force_numeric( $schema->{items} );
    }
    else {
        for my $field (@numeric_fields) {
            $schema->{$field} += 0 if defined $schema->{$field};
        }
    }
}
1;

View File

@ -0,0 +1,55 @@
#!/usr/bin/perl
#
# DW::BetaFeatures::Canary
#
# Handler for putting someone in or out of using canary.
#
# Authors:
# Mark Smith <mark@dreamwidth.org>
#
# Copyright (c) 2022 by Dreamwidth Studios, LLC.
#
# This program is free software; you may redistribute it and/or modify it under
# the same terms as Perl itself. For a copy of the license, please reference
# 'perldoc perlartistic' or 'perldoc perlgpl'.
#
package DW::BetaFeatures::Canary;
use strict;
use v5.10;
use Log::Log4perl;
my $log = Log::Log4perl->get_logger(__PACKAGE__);
use base 'LJ::BetaFeatures::default';
use LJ::Session;
# Usage: add_to_beta ( class, user )
# Called when $u opts in to the canary beta: issues a year-long
# "dwcanary" cookie. NOTE(review): the first pair appears to be the
# cookie name => value, with the remaining pairs as cookie attributes —
# confirm against LJ::Session::set_cookie before reordering anything.
sub add_to_beta {
    my ( $cls, $u ) = @_;
    LJ::Session::set_cookie(
        dwcanary => 1,
        domain => $LJ::DOMAIN,
        path => '/',
        http_only => 1,
        expires => 365 * 86400,    # one year, in seconds
    );
    $log->debug( 'Adding ', $u->user, '(', $u->id, ') to canary.' );
}
# Usage: remove_from_beta ( class, user )
# Called when $u leaves the canary beta: re-issues the "dwcanary" cookie
# with delete => 1 so the browser expires it.
# NOTE(review): the value 1 is still passed alongside delete => 1;
# presumably set_cookie ignores the value when deleting — confirm.
sub remove_from_beta {
    my ( $cls, $u ) = @_;
    LJ::Session::set_cookie(
        dwcanary => 1,
        domain => $LJ::DOMAIN,
        path => '/',
        http_only => 1,
        delete => 1,
    );
    $log->debug( 'Removing ', $u->user, '(', $u->id, ') from canary.' );
}

View File

@ -282,7 +282,38 @@ sub should_captcha_view {
# passed a captcha. If we have information, then they *have* at some point.
my $info_raw = LJ::MemCache::get($mckey);
unless ($info_raw) {
$log->debug( $mckey, ' has never been seen before, issuing captcha.' );
# Let's see if this is a repeat offender who is spamming requests at us
# and hitting a bunch of 302s -- in which case, temp ban
my $ip = $r->get_remote_ip;
my $mckey = "cct:$ip";
my ( $last_seen_ts, $count ) = split( /:/, LJ::MemCache::get($mckey) // "0:0" );
if ( $last_seen_ts > 0 ) {
# Subtract out forgiveness credit for each fully elapsed interval
my $intervals = int( ( time() - $last_seen_ts ) / $LJ::CAPTCHA_FRAUD_INTERVAL_SECS );
if ( $intervals > 1 ) {
$count -= $LJ::CAPTCHA_FRAUD_FORGIVENESS_AMOUNT * $intervals;
$count = 0
if $count < 0;
}
}
# Set the counter
$log->debug( $ip, ' has seen ', $count + 1, ' captcha requests.' );
LJ::MemCache::set(
$mckey,
join( ':', time(), $count + 1 ),
$LJ::CAPTCHA_FRAUD_INTERVAL_SECS * $LJ::CAPTCHA_FRAUD_LIMIT
);
# Now the trigger interval, if it's over, sysban this IP but just carry
# on with rendering this page (simpler)
if ( $count >= $LJ::CAPTCHA_FRAUD_LIMIT ) {
$log->info( 'Banning ', $ip, ' for exceeding captcha fraud threshold.' );
LJ::Sysban::tempban_create( ip => $ip, $LJ::CAPTCHA_FRAUD_SYSBAN_SECS );
}
return 1;
}
@ -292,7 +323,7 @@ sub should_captcha_view {
# If the first request is too long ago, then re-captcha
if ( ( time() - $first_req_ts ) > $LJ::CAPTCHA_RETEST_INTERVAL_SECS ) {
$log->debug( $mckey, ' has exceeded the retest interval, issuing captcha.' );
$log->info( $mckey, ' has exceeded the retest interval, issuing captcha.' );
return 1;
}
@ -307,11 +338,11 @@ sub should_captcha_view {
$last_req_ts = time();
}
# Log the things
$log->debug( $mckey, ' has ', $remaining, ' uses remaining.' );
# If we are out of requests, retest
return 1 if $remaining <= 0;
if ( $remaining <= 0 ) {
$log->info( $mckey, ' is out of requests by usage, retesting.' );
return 1;
}
# Things look good, so let's allow this to continue but update remaining
LJ::MemCache::set( $mckey, join( ':', $first_req_ts, $last_req_ts, $remaining - 1 ) );
@ -325,7 +356,7 @@ sub record_success {
return 0 unless $LJ::CAPTCHA_HCAPTCHA_SITEKEY;
my $mckey = _captcha_mckey();
$log->debug( 'Recording success for: ', $mckey );
$log->debug( 'Captcha success for: ', $mckey );
LJ::MemCache::set( $mckey, join( ':', time(), time(), $LJ::CAPTCHA_INITIAL_REMAINING ) );
}

View File

@ -101,7 +101,7 @@ sub render_success {
# - skip_captcha => 1 -- (DANGEROUS) do not ever captcha on this endpoint.
#
# Returns one of:
# - 0, $error_text (if there's an error)
# - 0, $error_response (if there's an error)
# - 1, $hashref (if everything looks good)
#
# Returned hashref can be passed to DW::Template->render_template as the 2nd

View File

@ -2,7 +2,7 @@
#
# DW::Controller::API::REST
#
#
# REST API.
#
# Authors:
# Allen Petersen <allen@suberic.net>
@ -17,20 +17,23 @@
package DW::Controller::API::REST;
use strict;
use warnings;
use DW::Request;
use DW::Routing;
use DW::Controller;
use DW::Controller::API;
use DW::API::Parameter;
use DW::API::Method;
use DW::API::Key;
use JSON;
use YAML::XS qw'LoadFile';
use JSON::Validator 'validate_json';
use Hash::MultiValue;
use v5.10;
use Log::Log4perl;
my $log = Log::Log4perl->get_logger(__PACKAGE__);
use Carp qw/ croak /;
use Hash::MultiValue;
use JSON;
use JSON::Validator 'validate_json';
use YAML::XS qw'LoadFile';
use DW::API::Key;
use DW::API::Method;
use DW::API::Parameter;
use DW::Controller;
use DW::Controller::API;
use DW::Request;
use DW::Routing;
our %API_DOCS = ();
our %TYPE_REGEX = (
@ -38,8 +41,8 @@ our %TYPE_REGEX = (
integer => '(\d+)',
boolean => '(true|false)',
);
our %METHODS = ( get => 1, post => 1, delete => 1 );
our $API_PATH = "$ENV{LJHOME}/api/";
our %METHODS = ( get => 1, post => 1, delete => 1, put => 1 );
our $API_PATH = "$ENV{LJHOME}/api/dist/";
# Usage: path ( yaml_source_path, ver, hash_of_HTTP_handlers )
# Creates a new path object for use in DW::Controller::API::REST
@ -146,11 +149,14 @@ sub _dispatcher {
my $r = $rv->{r};
my $keystr = $r->header_in('Authorization');
$keystr =~ s/Bearer (\w+)/$1/;
my $apikey = DW::API::Key->get_key($keystr);
my $apikey;
if ( defined $keystr ) {
$keystr =~ s/Bearer (\w+)/$1/;
$apikey = DW::API::Key->get_key($keystr);
}
# all paths require an API key except the spec (which informs users that they need a key and where to put it)
unless ( $apikey || $self->{path}{name} eq "/spec" ) {
unless ( defined($apikey) || $self->{path}{name} eq "/spec" ) {
$r->print( to_json( { success => 0, error => "Missing or invalid API key" } ) );
$r->status('401');
return;
@ -169,21 +175,24 @@ sub _dispatcher {
# check path-level parameters.
for my $param ( keys %{ $self->{path}{params} } ) {
_validate_param( $param, $self->{path}{params}{$param}, $r, $path_params, $args );
my $valid =
_validate_param( $param, $self->{path}{params}{$param}, $r, $path_params, $args );
return unless $valid;
}
my $method = lc $r->method;
my $handler = $self->{path}{methods}->{$method}->{handler};
my $method_self = $self->{path}{methods}->{$method};
# check method-level parameters
for my $param ( keys %{ $method_self->{params} } ) {
_validate_param( $param, $self->{params}{$param}, $r, $args );
my $valid = _validate_param( $param, $method_self->{params}{$param}, $r, undef, $args );
return unless $valid;
}
# if we accept a request body, validate that too.
if ( defined $method_self->{requestBody} ) {
_validate_body( $method_self->{requestBody}, $r, $args );
my $valid = _validate_body( $method_self->{requestBody}, $r, $args );
return unless $valid;
}
# some handlers need to know what version they are
@ -196,7 +205,7 @@ sub _dispatcher {
# Generic response for unimplemented API methods.
$r->print( to_json( { success => 0, error => "Not Implemented" } ) );
$r->status('501');
return;
return $r->OK;
}
}
@ -235,10 +244,14 @@ sub _validate_param {
unless ( defined $p ) {
$r->print( to_json( { success => 0, error => "Missing required parameter $param" } ) );
$r->status('400');
return;
return 0;
}
}
# non-required parameters may be undef without it being an error
# but we shouldn't try to validate them if they're undef.
return 1 unless ( defined $p );
# run the schema validator
my @errors = $pval->validate($p);
if (@errors) {
@ -246,10 +259,11 @@ sub _validate_param {
$r->print(
to_json( { success => 0, error => "Bad format for $param. Errors: $err_str" } ) );
$r->status('400');
return;
return 0;
}
$arg_obj->{$ploc}{$param} = $p;
return 1;
}
# Usage: _validate_body (requestBody config, request, arg object)
@ -263,9 +277,9 @@ sub _validate_param {
sub _validate_body {
my ( $config, $r, $arg_obj ) = @_;
my $preq = $config->{required};
my $content_type = lc $r->header_in('Content-Type');
$content_type =~ s/;.*//; # drop data that isn't the MIMEtype
my $p;
if ( $content_type eq 'application/json' ) {
@ -274,6 +288,11 @@ sub _validate_body {
elsif ( $content_type eq 'application/x-www-form-urlencoded' ) {
$p = $r->post_args;
}
elsif ( $content_type eq 'application/octet-stream' ) {
# TODO: CHICKEN: IMPLEMENT
die "not implemented yet\n";
}
elsif ( $content_type eq 'multipart/form-data' ) {
# uploads are an array of hashrefs, so we convert to Hash::MultiValue for simplicty
@ -284,6 +303,9 @@ sub _validate_body {
}
$p = $upload_hash;
}
else {
warn "Unexpected content-type $content_type";
}
# make sure that required parameters are supplied
if ($preq) {
@ -291,21 +313,26 @@ sub _validate_body {
$r->print(
to_json( { success => 0, error => "Missing or badly formatted request!" } ) );
$r->status('400');
return;
return 0;
}
}
# non-required parameters may be undef without it being an error
# but we shouldn't try to validate them if they're undef.
#return 1 unless ( defined $p && defined($config->{content}->{$content_type}{validator}));
# run the schema validator
my @errors = $config->{content}->{$content_type}{validator}->validate($p);
my @errors = $config->{content}{$content_type}{validator}->validate($p);
if (@errors) {
my $err_str = join( ', ', map { $_->{message} } @errors );
$r->print(
to_json( { success => 0, error => "Bad format for request body. Errors: $err_str" } ) );
$r->status('400');
return;
return 0;
}
$arg_obj->{body} = $p;
return 1;
}
# Usage: schema ($object_ref)
@ -354,4 +381,77 @@ sub TO_JSON {
return $json;
}
# Returns an arrayref of this path's parameter definition objects
# (the values of the path-level params hash, in hash order).
sub params {
    my ($self) = @_;
    return [ values %{ $self->{path}{params} } ];
}
# Accessor: returns the hashref of HTTP method definitions for this path.
sub methods {
    my ($self) = @_;
    return $self->{path}{methods};
}
# Renders this path's parameters and methods through the api/path.tt
# template (without the site scheme) and returns the rendered output.
sub to_template {
    my ($self) = @_;

    my $vars = {
        params  => [ values %{ $self->{path}{params} } ],
        methods => $self->{path}{methods},
    };

    return DW::Template->render_template( 'api/path.tt', $vars, { no_sitescheme => 1 } );
}
DW::Routing->register_string( '/api', \&api_handler, app => 1 );
DW::Routing->register_string( '/api/', \&api_handler, app => 1 );
# Handler for the API documentation index page. Requires a logged-in
# user; renders the list of version-1 API paths along with the remote
# user's API key (created on demand by DW::API::Key->get_one).
sub api_handler {
    my ( $ok, $rv ) = controller();
    return $rv unless $ok;

    my $remote = $rv->{remote};

    # %API_DOCS is keyed by API version; we currently document only v1.
    # Index it directly rather than copying the whole hash.
    my $vars = {
        paths => $API_DOCS{1},
        key   => DW::API::Key->get_one($remote),
    };

    return DW::Template->render_template( 'api.tt', $vars );
}
DW::Routing->register_string( '/api/getkey', \&key_handler, app => 1 );
# Plain-text endpoint returning an API key for the logged-in user,
# generating one if they have none yet. Used so client tooling can fetch
# a working key directly.
sub key_handler {
    my ( $ok, $rv ) = controller();
    return $rv unless $ok;

    my $r      = $rv->{r};
    my $remote = $rv->{remote};

    my $key = DW::API::Key->get_one($remote);
    $r->status(200);
    $r->content_type('text/plain; charset=utf-8');

    # Use the hash() accessor rather than reaching into the object's
    # internals, consistent with DW::API::Key's own interface.
    $r->print( $key->hash );
    return $r->OK;
}
DW::Routing->register_string( '/internal/api/404', \&api_404_handler, app => 1 );
# Fallback handler that produces a JSON-formatted 404 response for API
# requests that matched no registered route. Anonymous access is allowed.
sub api_404_handler {
    my ( $ok, $rv ) = controller( anonymous => 1 );
    return $rv unless $ok;

    my $req = $rv->{r};
    $req->status(404);
    $req->content_type('application/json; charset=utf-8');
    $req->print( to_json( { success => 0, error => "Not found." } ) );
    return $req->OK;
}
1;

View File

@ -27,6 +27,7 @@ sub rest_get {
my ( $self, $args ) = @_;
my $u = LJ::load_user( $args->{path}{username} );
return $self->rest_error("404") unless defined $u;
# if we're given a picid, try to load that userpic
if ( defined( $args->{path}{picid} ) && $args->{path}{picid} ne "" ) {

View File

@ -42,22 +42,25 @@ sub _spec_20 {
my $security_defs =
{ "api_key" =>
{ "type" => "http", "scheme" => "Bearer", "bearerFormat" => "Bearer <api_key>" } };
{ "type" => "http", "scheme" => "bearer", "bearerFormat" => "Bearer <api_key>" } };
my @security = map {
{ $_ => [] }
} keys(%$security_defs);
my %spec = (
openapi => '3.0.0',
servers => (
servers => [
{
url => "$LJ::WEB_DOMAIN/api/v$ver"
url => "$LJ::SITEROOT/api/v$ver"
},
),
],
info => {
title => "$LJ::SITENAME API",
description => "An OpenAPI-compatible API for $LJ::SITENAME",
version => $ver,
},
security => keys(%$security_defs),
security => \@security,
components => {
securitySchemes => $security_defs,
}

View File

@ -42,40 +42,9 @@ sub captcha_handler {
return $rv unless $ok;
my $r = DW::Request->get;
my $ip = $r->get_remote_ip;
my $get_args = $r->get_args;
# Before we render, let's see if we're in a state where someone is getting
# sent to captcha-land in a too aggressive fashion. If so, we want to
# apply a sysban to the IP so we stop wasting resources.
my $ip = $r->get_remote_ip;
my $mckey = "cct:$ip";
my ( $last_seen_ts, $count ) = split( /:/, LJ::MemCache::get($mckey) // "0:0" );
if ( $last_seen_ts > 0 ) {
# Subtract out
my $intervals = int( ( time() - $last_seen_ts ) / $LJ::CAPTCHA_FRAUD_INTERVAL_SECS );
if ( $intervals > 1 ) {
$count -= $LJ::CAPTCHA_FRAUD_FORGIVENESS_AMOUNT * $intervals;
$count = 0
if $count < 0;
}
}
# Set the counter
$log->debug( $ip, ' has seen ', $count + 1, ' captcha requests.' );
LJ::MemCache::set(
$mckey,
join( ':', time(), $count + 1 ),
$LJ::CAPTCHA_FRAUD_INTERVAL_SECS * $LJ::CAPTCHA_FRAUD_LIMIT
);
# Now the trigger interval, if it's over, sysban this IP but just carry
# on with rendering this page (simpler)
if ( $count >= $LJ::CAPTCHA_FRAUD_LIMIT ) {
$log->info( 'Banning ', $ip, ' for exceeding captcha fraud threshold.' );
LJ::Sysban::tempban_create( ip => $ip, $LJ::CAPTCHA_FRAUD_SYSBAN_SECS );
}
# Renderer for GETs
my $render_captcha_form = sub {
return DW::Template->render_template( 'auth/captcha.tt',

View File

@ -25,7 +25,6 @@ use DW::Routing;
use DW::Template;
use DW::FormErrors;
use LJ::Hooks;
use Data::Dumper;
DW::Routing->register_string( '/inbox/new', \&index_handler, app => 1 );
DW::Routing->register_string( '/inbox/new/compose', \&compose_handler, app => 1 );
@ -133,6 +132,7 @@ sub index_handler {
elsif ( $view eq "singleentry" ) {
$mark_all_text = "widget.inbox.menu.mark_all_read.entry.btn";
$delete_all_text = "widget.inbox.menu.delete_all.entry.btn";
$vars->{itemid} = $itemid;
}
else {
$mark_all_text = "widget.inbox.menu.mark_all_read.subfolder.btn";
@ -143,6 +143,9 @@ sub index_handler {
$vars->{delete_all} = $delete_all_text;
$vars->{img} = &LJ::img;
# TODO: Remove this when beta is over
$vars->{dw_beta} = LJ::load_user('dw_beta');
return DW::Template->render_template( 'inbox/index.tt', $vars );
}
@ -153,7 +156,7 @@ sub render_items {
or return error_ml( "/inbox/index.tt.error.couldnt_retrieve_inbox",
{ 'user' => $remote->{user} } );
my $starting_index = ( $page - 1 ) * $PAGE_LIMIT;
my $ending_index = $starting_index + $PAGE_LIMIT;
my $ending_index = $starting_index - 1 + $PAGE_LIMIT;
my @display_items = @$items_ref;
@display_items = sort { $b->when_unixtime <=> $a->when_unixtime } @display_items;
@display_items = @display_items[ $starting_index .. $ending_index ];
@ -369,6 +372,7 @@ sub items_by_view {
else {
@all_items = $inbox->all_items;
}
return \@all_items;
}

View File

@ -32,13 +32,17 @@ sub ban_handler {
my $r = DW::Request->get;
my $u = $rv->{remote};
my $POST = $r->post_args;
my $GET = $r->get_args;
my $u = $rv->{u};
my $remote = $rv->{remote};
my $POST = $r->post_args;
my $GET = $r->get_args;
my $submit_msg = 0;
my %editvals;
die "User cannot modify this community"
unless $remote->can_manage($u);
if ( $r->did_post ) {
# check to see if we're doing a note edit instead

View File

@ -39,6 +39,7 @@ sub login_handler {
if ( $adminmode && $user ) {
$u = LJ::load_user($user);
return error_ml('error.username_notfound') unless $u;
return error_ml('error.purged.text') if $u->is_expunged;
$user = undef if $rv->{remote}->equals($u);
}
else {
@ -50,13 +51,25 @@ sub login_handler {
if ( $r->did_post ) {
# Form auth is automagically checked.
if ( !$user ) {
my $sid = $r->post_args->{session};
$sessions->{$sid}->destroy if $sessions->{$sid};
# Does not support editing another user's sessions, so bail
return $r->redirect( LJ::create_url() )
if $user;
if ( $r->post_args->{logout} eq 'some' ) {
foreach my $arg ( keys %{ $r->post_args } ) {
next unless $arg =~ /^logout:(\d+)$/;
$sessions->{$1}->destroy if exists $sessions->{$1};
}
}
return $r->redirect( LJ::create_url(undef) );
elsif ( $r->post_args->{logout} eq 'all' ) {
foreach my $sess ( values %$sessions ) {
$sess->destroy;
}
}
return $r->redirect( LJ::create_url() );
}
my $sth = $u->prepare("SELECT logintime, sessid, ip, ua FROM loginlog WHERE userid=?")
or die('Unable to prepare loginlog');
$sth->execute( $u->userid )

View File

@ -44,13 +44,13 @@ sub _shop_controller {
# if payments are disabled, do nothing
unless ( LJ::is_enabled('payments') ) {
$r->redirect("$LJ::SITEROOT/");
return ( 0, LJ::Lang::ml('shop.unavailable') );
return ( 0, error_ml('shop.unavailable') );
}
# if they're banned ...
my $err = DW::Shop->remote_sysban_check;
return ( 0, $err ) if $err;
if ( my $err = DW::Shop->remote_sysban_check ) {
return ( 0, DW::Template->render_template( 'error.tt', { message => $err } ) );
}
# basic controller setup
my ( $ok, $rv ) = controller(%args);
@ -83,6 +83,8 @@ sub shop_index_handler {
my ( $ok, $rv ) = _shop_controller( anonymous => 1 );
return $rv unless $ok;
$rv->{shop_config} = \%LJ::SHOP;
return DW::Template->render_template( 'shop/index.tt', $rv );
}
@ -270,6 +272,8 @@ sub shop_points_handler {
$rv->{errs} = \%errs;
my $r = DW::Request->get;
return $r->redirect("$LJ::SITEROOT/shop") unless exists $LJ::SHOP{points};
if ( $r->did_post ) {
my $args = $r->post_args;
die "invalid auth\n" unless LJ::check_form_auth( $args->{lj_form_auth} );
@ -338,6 +342,8 @@ sub shop_icons_handler {
$rv->{errs} = \%errs;
my $r = DW::Request->get;
return $r->redirect("$LJ::SITEROOT/shop") unless exists $LJ::SHOP{icons};
if ( $r->did_post ) {
my $args = $r->post_args;
die "invalid auth\n" unless LJ::check_form_auth( $args->{lj_form_auth} );

View File

@ -132,7 +132,6 @@ sub enter_cc_handler {
# load country codes, and US states
my ( %countries, %usstates );
DW::Countries->load( \%countries );
delete $countries{UK}; # UK is also GB; don't display both
LJ::load_codes( { state => \%usstates } );
# now sort the above appropriately

View File

@ -57,7 +57,7 @@ sub main_handler {
{ # load country and state stats
my %countries;
DW::Countries->load( \%countries );
DW::Countries->load_legacy( \%countries );
$sth = $dbr->prepare(
"SELECT statkey, statval FROM stats WHERE statcat='country'
ORDER BY statval DESC LIMIT 15"
@ -183,9 +183,6 @@ sub main_handler {
}
my %graphs = ( newbyday => 'stats/newbyday.png' );
foreach ( keys %graphs ) {
delete $graphs{$_} unless -f "$LJ::HTDOCS/$graphs{$_}";
}
my $vars = {
stat => \%stat,

View File

@ -166,9 +166,11 @@ sub faqbrowse_handler {
my $GET = $r->get_args;
my $user;
my $user_url;
my $vars;
my $vars = {};
if ($remote) {
$vars->{remote} = $remote;
$user = $remote->user;
$user_url = $remote->journal_base;
}

View File

@ -417,13 +417,17 @@ sub tellafriend_handler {
$u->{'emailpref'} = $u->site_email_alias;
}
my $news_journal = LJ::load_user($LJ::NEWS_JOURNAL);
my $news_url = LJ::isu($news_journal) ? $news_journal->journal_base : '';
my $footer_ml = LJ::isu($news_journal) ? 'footer.news' : 'footer';
my $msg_footer = LJ::Lang::ml(
"$scope.email.body.footer1",
"$scope.email.body.$footer_ml",
{
user => $u->{user},
sitename => $LJ::SITENAME,
sitenameshort => $LJ::SITENAMESHORT,
domain => $LJ::DOMAIN
news_url => $news_url
}
);
my $custom_msg = "\n\n" . LJ::Lang::ml( "$scope.email.body.custom", { user => $u->{user} } );
@ -446,12 +450,13 @@ sub tellafriend_handler {
}
# Check for images
if ( $post_args->{'body'} =~ /<(img|forbiddenimages)\s+src/i ) {
my $custom_body = $post_args->{'body'} // '';
if ( $custom_body =~ /<(img|image)\s+src/i ) {
$errors->add( 'body', ".error.forbiddenimages" );
}
# Check for external URLs
foreach ( LJ::get_urls( $post_args->{'body'} ) ) {
foreach ( LJ::get_urls($custom_body) ) {
if ( $_ !~ m!^https?://([\w-]+\.)?$LJ::DOMAIN(/.*)?$!i ) {
$errors->add( 'body', ".error.forbiddenurl", { sitename => $LJ::SITENAME } );
}
@ -465,32 +470,36 @@ sub tellafriend_handler {
# All valid, go ahead and send
my $msg_body = $post_args->{'body_start'};
if ( $post_args->{'body'} ne '' ) {
$msg_body .= $custom_msg . "\n-----\n" . $post_args->{'body'} . "\n-----";
if ( $custom_body ne '' ) {
$msg_body .= $custom_msg . "\n-----\n" . $custom_body . "\n-----";
}
$msg_body .= $msg_footer;
LJ::send_mail(
{
'to' => $toemail,
'from' => $u->{'emailpref'},
'from' => $LJ::BOGUS_EMAIL,
'fromname' => $u->user . LJ::Lang::ml("$scope.via") . " $LJ::SITENAMESHORT",
'charset' => 'utf-8',
'subject' => $post_args->{'subject'},
'body' => $msg_body,
'headers' => {
'Reply-To' => qq{"$u->{user}" <$u->{emailpref}>},
}
}
);
my $tolist = $toemail;
$tolist =~ s/(,\s*)/<br \/>/g;
$r->add_msg( LJ::Lang::ml('$scope.sentpage.body.mailedlist') . "<br />" . $tolist,
$r->add_msg( LJ::Lang::ml("$scope.sentpage.body.mailedlist") . "<br />" . $tolist,
$r->SUCCESS );
}
}
my ( $subject, $msg );
$subject = LJ::Lang::ml("$scope.email.subject.noentry");
if ( $itemid =~ /^\d+$/ ) {
$msg = '';
if ( defined $itemid && $itemid =~ /^\d+$/ ) {
my $uj = LJ::load_user($journal);
return error_ml("$scope.error.unknownjournal") unless $uj;
@ -532,7 +541,7 @@ sub tellafriend_handler {
{ sitenameshort => $LJ::SITENAMESHORT } );
}
if ( $get_args->{'user'} =~ /^\w{1,$LJ::USERNAME_MAXLENGTH}$/ ) {
if ( defined $get_args->{'user'} && $get_args->{'user'} =~ /^\w{1,$LJ::USERNAME_MAXLENGTH}$/ ) {
my $user = $get_args->{'user'};
my $uj = LJ::load_user($user);
my $url = $uj->journal_base;
@ -564,12 +573,12 @@ sub tellafriend_handler {
my $vars = {
'u' => $u,
'errors' => $errors,
'formdata' => $r->post_args || $default_formdata,
'formdata' => $r->did_post ? $r->post_args : $default_formdata,
'display_msg' => $display_msg,
'display_msg_footer' => $display_msg_footer,
'email_checkbox' => $email_checkbox
};
return DW::Template->render_template( 'tools/emailmanage.tt', $vars );
return DW::Template->render_template( 'tools/tellafriend.tt', $vars );
}
1;

View File

@ -48,7 +48,21 @@ sub load {
foreach my $code ( all_country_codes() ) {
$countries->{ uc $code } = code2country($code);
}
$countries->{UK} = $countries->{GB};
}
=head2 C<< DW::Countries->load_legacy( $hashref ) >>
Adds some additional legacy codes for displaying older data where appropriate.
=cut
sub load_legacy {
    my ( $class, $countries ) = @_;

    # Start from the standard country list ...
    $class->load($countries);

    # ... then alias historical codes onto Great Britain so older
    # stored data still displays a country name:
    #   LJSC - Scotland, UK - United Kingdom
    $countries->{$_} = $countries->{GB} for qw( LJSC UK );
}
1;

View File

@ -51,4 +51,60 @@ LJ::Hooks::register_hook(
}
);
LJ::Hooks::register_hook(
    'finduser_delve',
    sub {
        my ($us) = @_;

        # Personal (non-community) accounts only, sorted by username
        # so output is stable between runs.
        my @users = sort { $a->user cmp $b->user } grep { !$_->is_community } values %$us;

        my $ret = '';

        # Accounts with at least one paid/processed shop cart.
        # The accepted-state lookup is loop-invariant, so build it once.
        my %ok = ( $DW::Shop::STATE_PAID => 1, $DW::Shop::STATE_PROCESSED => 1 );
        my @paid;
        foreach my $u (@users) {
            my @carts = grep { $ok{ $_->state } } DW::Shop::Cart->get_all($u);
            push @paid, $u if @carts;
        }
        $ret .= sprintf( "%d accounts with payment history:\n", scalar @paid );
        $ret .= sprintf( "%s\n", $_->user ) foreach @paid;

        # Collect historical email addresses from infohistory.
        my $dbh = LJ::get_db_reader();
        my $sth = $dbh->prepare("SELECT * FROM infohistory WHERE userid=?");

        my %emails;    # old address => arrayref of usernames that used it
        my %seen;      # usernames with at least one historical address
        foreach my $u (@users) {
            $sth->execute( $u->id );

            # Note: don't gate on $sth->rows here; DBI documents it as
            # unreliable for SELECT before fetching, and an empty result
            # simply means the while loop body never runs.
            while ( my $info = $sth->fetchrow_hashref ) {
                if ( $info->{what} && $info->{what} eq 'email' ) {
                    my $e = $info->{oldvalue};
                    push @{ $emails{$e} }, $u->user;
                    $seen{ $u->user } = 1;
                }
            }
        }

        if ( my $num_changed = scalar keys %seen ) {
            $ret .= sprintf( "%d additional historical email addresses on %d accounts:\n",
                scalar keys %emails, $num_changed );
        }
        foreach my $e ( sort keys %emails ) {
            $ret .= sprintf( "%s: used by %s\n", $e, join( ', ', @{ $emails{$e} } ) );
        }

        return $ret;
    }
);

1;

View File

@ -3,7 +3,7 @@
# DW::Hooks::PrivList
#
# This module implements the listing of valid arguments for each
# known user privilege in dw-free. Any site that defines a different
# known user privilege in Dreamwidth. Any site that defines a different
# set of privs or privargs must create additional hooks to supplement
# this list.
#

View File

@ -0,0 +1,52 @@
#!/usr/bin/perl
#
# DW::Hooks::ProfileSave
#
# This module implements a hook for lightweight logging of uids
# who save profile edits, for later examination to detect
# accounts being used for spam purposes.
#
# Authors:
# Jen Griffin <kareila@livejournal.com>
#
# Copyright (c) 2022 by Dreamwidth Studios, LLC.
#
# This program is free software; you may redistribute it and/or modify it under
# the same terms as Perl itself. For a copy of the license, please reference
# 'perldoc perlartistic' or 'perldoc perlgpl'.
#
package DW::Hooks::ProfileSave;

use strict;
use warnings;

use LJ::Hooks;
use LJ::MemCache;

LJ::Hooks::register_hook(
    'profile_save',
    sub {
        my ( $u, $saved, $post ) = @_;
        return unless defined $u && defined $saved && defined $post;

        # Only record this edit if the profile URL changed to a
        # non-empty value; all other profile edits are ignored.
        my $old_url = $saved->{url} // '';
        my $new_url = $post->{url}  // '';
        return if $old_url eq $new_url;
        return unless $new_url;

        # Append this uid to the shared list; the key expires a week
        # after the most recent qualifying edit.
        # NOTE(review): this read-modify-write is not atomic, so
        # concurrent edits can drop entries, and the list is unbounded
        # within the expiry window — acceptable for lightweight spam
        # triage, but confirm before relying on completeness.
        my $editors = LJ::MemCache::get('profile_editors') // [];
        push @$editors, $u->id;
        LJ::MemCache::set( 'profile_editors', $editors, 86400 * 7 );
    }
);

1;

Some files were not shown because too many files have changed in this diff. Show More