first commit

This commit is contained in:
Michael Dong
2026-02-05 11:24:40 +08:00
commit a98e12f286
144 changed files with 26459 additions and 0 deletions

11
.env.example Normal file
View File

@@ -0,0 +1,11 @@
# 数据库配置
POSTGRES_USER=notify
POSTGRES_PASSWORD=your_secure_password_here
POSTGRES_DB=notify
# JWT 密钥（建议使用 openssl rand -base64 32 生成）
JWT_SECRET=your_jwt_secret_here
# 域名配置
BASE_URL=https://notify.example.com
NEXT_PUBLIC_API_URL=https://notify.example.com

12
.gitignore vendored Normal file
View File

@@ -0,0 +1,12 @@
.cursor
.vscode
.idea
.DS_Store
.env
.env.local
.env.development.local
.env.test.local
.env.production.local
node_modules
dist
build

76
Makefile Normal file
View File

@@ -0,0 +1,76 @@
# Declare every non-file target so make never skips a recipe because a file
# with the same name exists. (The old list named a nonexistent `migrate-deploy`
# and omitted migrate-down/fresh/status, the prisma targets, build, and
# generate-entities.)
.PHONY: dev dev-node stop migrate migrate-down migrate-fresh migrate-status \
	migrate-prisma migrate-prisma-deploy db-push db-studio build generate-entities

# Load variables from .env (if present) and export them to all recipes.
ifneq (,$(wildcard .env))
include .env
export
endif

# Rust backend (default)
dev:
	@echo "Starting Notify dev services (Rust backend)..."
	@nohup sh -c 'cd backend_rust && DATABASE_URL="$${DATABASE_URL}" JWT_SECRET="$${JWT_SECRET:-dev-secret}" RUST_LOG="info" cargo run' > /tmp/notify-api.log 2>&1 &
	@nohup sh -c 'cd frontend && PORT="$${FRONTEND_PORT:-3000}" NEXT_PUBLIC_API_BASE="$${NEXT_PUBLIC_API_BASE:-http://localhost:4000}" npm run dev' > /tmp/notify-web.log 2>&1 &
	@echo "API log: /tmp/notify-api.log"
	@echo "Web log: /tmp/notify-web.log"

# Node.js backend (legacy)
dev-node:
	@echo "Starting Notify dev services (Node.js backend)..."
	@nohup sh -c 'cd backend && DATABASE_URL="$${DATABASE_URL}" JWT_SECRET="$${JWT_SECRET:-dev-secret}" PORT="$${PORT:-4000}" npm run dev' > /tmp/notify-api.log 2>&1 &
	@nohup sh -c 'cd backend && DATABASE_URL="$${DATABASE_URL}" JWT_SECRET="$${JWT_SECRET:-dev-secret}" npm run worker' > /tmp/notify-worker.log 2>&1 &
	@nohup sh -c 'cd frontend && PORT="$${FRONTEND_PORT:-3000}" NEXT_PUBLIC_API_BASE="$${NEXT_PUBLIC_API_BASE:-http://localhost:4000}" npm run dev' > /tmp/notify-web.log 2>&1 &
	@echo "API log: /tmp/notify-api.log"
	@echo "Worker log: /tmp/notify-worker.log"
	@echo "Web log: /tmp/notify-web.log"

# Kill any dev processes started above; `|| true` keeps this idempotent.
stop:
	@echo "Stopping Notify dev services..."
	@pkill -f "target/debug/backend_rust" || true
	@pkill -f "tsx watch src/index.ts" || true
	@pkill -f "tsx src/worker.ts" || true
	@pkill -f "next dev" || true
	@echo "Stopped."

# SeaORM migrations (Rust)
migrate:
	@echo "Running SeaORM migration..."
	cd backend_rust && DATABASE_URL="$${DATABASE_URL}" cargo run --package migration -- up

migrate-down:
	@echo "Rolling back last migration..."
	cd backend_rust && DATABASE_URL="$${DATABASE_URL}" cargo run --package migration -- down

migrate-fresh:
	@echo "Resetting database and running all migrations..."
	cd backend_rust && DATABASE_URL="$${DATABASE_URL}" cargo run --package migration -- fresh

migrate-status:
	@echo "Checking migration status..."
	cd backend_rust && DATABASE_URL="$${DATABASE_URL}" cargo run --package migration -- status

# Prisma migrations (Node.js legacy)
migrate-prisma:
	@echo "Running Prisma migration (dev)..."
	cd backend && npx prisma migrate dev

migrate-prisma-deploy:
	@echo "Running Prisma migration (deploy)..."
	cd backend && npx prisma migrate deploy

db-push:
	@echo "Pushing schema to database..."
	cd backend && npx prisma db push

db-studio:
	@echo "Opening Prisma Studio..."
	cd backend && npx prisma studio

# Build
build:
	@echo "Building Rust backend..."
	cd backend_rust && cargo build --release

# Generate SeaORM entities from database
generate-entities:
	@echo "Generating SeaORM entities..."
	cd backend_rust && sea-orm-cli generate entity -o src/entity --with-serde both

8
backend/.dockerignore Normal file
View File

@@ -0,0 +1,8 @@
node_modules
dist
.env
.env.*
*.log
.git
.gitignore
README.md

8
backend/.gitignore vendored Normal file
View File

@@ -0,0 +1,8 @@
node_modules
dist
build
.env
.env.local
.env.development.local
.env.test.local
.env.production.local

44
backend/Dockerfile Normal file
View File

@@ -0,0 +1,44 @@
# Build stage: install all deps, generate the Prisma client, compile TS.
FROM node:20-alpine AS builder
WORKDIR /app

# Copy package files and prisma schema first for better layer caching.
COPY package*.json ./
COPY prisma ./prisma/
RUN npm ci
RUN npx prisma generate

# Copy source code and build TypeScript.
COPY . .
RUN npm run build

# Production stage: production deps only.
FROM node:20-alpine AS runner
WORKDIR /app
ENV NODE_ENV=production

COPY package*.json ./
COPY prisma ./prisma/
RUN npm ci --omit=dev
# `prisma` lives in devDependencies, but the CLI is needed at container start
# for `migrate deploy` (and here for `generate`). Install it explicitly at
# build time so the CMD below does not depend on npx downloading it from the
# registry when the container boots (which fails without network access).
RUN npm install --no-save prisma@^5.13.0 \
    && npx prisma generate

# Copy built files from builder.
COPY --from=builder /app/dist ./dist

# Run pending migrations, then start the API.
EXPOSE 4000
CMD ["sh", "-c", "npx prisma migrate deploy && node dist/index.js"]

1821
backend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

33
backend/package.json Normal file
View File

@@ -0,0 +1,33 @@
{
"name": "notify-backend",
"version": "0.1.0",
"private": true,
"main": "dist/index.js",
"scripts": {
"dev": "tsx watch src/index.ts",
"build": "tsc -p tsconfig.json",
"start": "node dist/index.js",
"worker": "tsx src/worker.ts",
"prisma:generate": "prisma generate",
"prisma:migrate": "prisma migrate dev"
},
"dependencies": {
"@prisma/client": "^5.13.0",
"bcryptjs": "^2.4.3",
"cors": "^2.8.5",
"dotenv": "^16.4.5",
"express": "^4.19.2",
"express-rate-limit": "^7.3.1",
"helmet": "^7.1.0",
"jsonwebtoken": "^9.0.2",
"zod": "^3.23.8"
},
"devDependencies": {
"@types/express": "^4.17.21",
"@types/jsonwebtoken": "^9.0.6",
"@types/node": "^20.14.2",
"prisma": "^5.13.0",
"tsx": "^4.16.0",
"typescript": "^5.5.2"
}
}

View File

@@ -0,0 +1,203 @@
-- Initial schema for the Notify app. Generated by `prisma migrate dev`;
-- keep in sync with prisma/schema.prisma rather than editing by hand
-- (Prisma records a checksum of applied migrations).
-- CreateEnum
CREATE TYPE "RecurrenceType" AS ENUM ('hourly', 'daily', 'weekly', 'monthly', 'yearly');
-- CreateEnum
CREATE TYPE "TargetType" AS ENUM ('todo', 'reminder_task');
-- CreateEnum
CREATE TYPE "ChannelType" AS ENUM ('inapp', 'bark');
-- CreateEnum
CREATE TYPE "NotificationStatus" AS ENUM ('pending', 'queued', 'sent', 'failed');
-- CreateTable
CREATE TABLE "User" (
    "id" TEXT NOT NULL,
    "username" TEXT NOT NULL,
    "passwordHash" TEXT NOT NULL,
    "timezone" TEXT NOT NULL DEFAULT 'Asia/Shanghai',
    "barkUrl" TEXT,
    "inappEnabled" BOOLEAN NOT NULL DEFAULT true,
    "barkEnabled" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    CONSTRAINT "User_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- Invite codes for registration; usage is bounded by maxUses/usedCount and
-- limited in time by expiresAt / revokedAt.
CREATE TABLE "Invite" (
    "id" TEXT NOT NULL,
    "code" TEXT NOT NULL,
    "creatorId" TEXT NOT NULL,
    "maxUses" INTEGER NOT NULL DEFAULT 5,
    "usedCount" INTEGER NOT NULL DEFAULT 0,
    "expiresAt" TIMESTAMP(3) NOT NULL,
    "revokedAt" TIMESTAMP(3),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "Invite_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "RecurrenceRule" (
    "id" TEXT NOT NULL,
    "type" "RecurrenceType" NOT NULL,
    "interval" INTEGER NOT NULL DEFAULT 1,
    "byWeekday" INTEGER,
    "byMonthday" INTEGER,
    "timezone" TEXT NOT NULL DEFAULT 'Asia/Shanghai',
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    CONSTRAINT "RecurrenceRule_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "Todo" (
    "id" TEXT NOT NULL,
    "ownerId" TEXT NOT NULL,
    "title" TEXT NOT NULL,
    "description" TEXT,
    "dueAt" TIMESTAMP(3) NOT NULL,
    "recurrenceRuleId" TEXT,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    CONSTRAINT "Todo_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "ReminderTask" (
    "id" TEXT NOT NULL,
    "creatorId" TEXT NOT NULL,
    "title" TEXT NOT NULL,
    "description" TEXT,
    "dueAt" TIMESTAMP(3) NOT NULL,
    "recurrenceRuleId" TEXT,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    CONSTRAINT "ReminderTask_pkey" PRIMARY KEY ("id")
);
-- CreateTable
-- Join table: which users receive a given ReminderTask.
CREATE TABLE "ReminderTaskRecipient" (
    "taskId" TEXT NOT NULL,
    "userId" TEXT NOT NULL,
    CONSTRAINT "ReminderTaskRecipient_pkey" PRIMARY KEY ("taskId","userId")
);
-- CreateTable
-- Reminder lead times. (targetType, targetId) is a polymorphic reference to
-- either Todo or ReminderTask, so no foreign key is declared here.
CREATE TABLE "ReminderOffset" (
    "id" TEXT NOT NULL,
    "targetType" "TargetType" NOT NULL,
    "targetId" TEXT NOT NULL,
    "offsetMinutes" INTEGER NOT NULL,
    "channelInapp" BOOLEAN NOT NULL DEFAULT true,
    "channelBark" BOOLEAN NOT NULL DEFAULT false,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "ReminderOffset_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "Notification" (
    "id" TEXT NOT NULL,
    "recipientId" TEXT NOT NULL,
    "targetType" "TargetType" NOT NULL,
    "targetId" TEXT NOT NULL,
    "triggerAt" TIMESTAMP(3) NOT NULL,
    "channel" "ChannelType" NOT NULL,
    "status" "NotificationStatus" NOT NULL DEFAULT 'pending',
    "lockedAt" TIMESTAMP(3),
    "sentAt" TIMESTAMP(3),
    "readAt" TIMESTAMP(3),
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,
    CONSTRAINT "Notification_pkey" PRIMARY KEY ("id")
);
-- CreateTable
CREATE TABLE "DeliveryLog" (
    "id" TEXT NOT NULL,
    "notificationId" TEXT NOT NULL,
    "attemptNo" INTEGER NOT NULL,
    "channel" "ChannelType" NOT NULL,
    "status" "NotificationStatus" NOT NULL,
    "responseMeta" JSONB,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    CONSTRAINT "DeliveryLog_pkey" PRIMARY KEY ("id")
);
-- CreateIndex
CREATE UNIQUE INDEX "User_username_key" ON "User"("username");
-- CreateIndex
CREATE UNIQUE INDEX "Invite_code_key" ON "Invite"("code");
-- CreateIndex
CREATE INDEX "Invite_creatorId_idx" ON "Invite"("creatorId");
-- CreateIndex
CREATE INDEX "Invite_expiresAt_idx" ON "Invite"("expiresAt");
-- CreateIndex
CREATE INDEX "Todo_ownerId_dueAt_idx" ON "Todo"("ownerId", "dueAt");
-- CreateIndex
CREATE INDEX "Todo_recurrenceRuleId_idx" ON "Todo"("recurrenceRuleId");
-- CreateIndex
CREATE INDEX "ReminderTask_creatorId_dueAt_idx" ON "ReminderTask"("creatorId", "dueAt");
-- CreateIndex
CREATE INDEX "ReminderTask_recurrenceRuleId_idx" ON "ReminderTask"("recurrenceRuleId");
-- CreateIndex
CREATE INDEX "ReminderTaskRecipient_userId_idx" ON "ReminderTaskRecipient"("userId");
-- CreateIndex
CREATE INDEX "ReminderOffset_targetType_targetId_idx" ON "ReminderOffset"("targetType", "targetId");
-- CreateIndex
CREATE INDEX "Notification_status_triggerAt_idx" ON "Notification"("status", "triggerAt");
-- CreateIndex
CREATE INDEX "Notification_recipientId_readAt_idx" ON "Notification"("recipientId", "readAt");
-- CreateIndex
-- Dedupe guard: at most one notification per
-- (recipient, target, trigger time, channel).
CREATE UNIQUE INDEX "Notification_recipientId_targetType_targetId_triggerAt_chan_key" ON "Notification"("recipientId", "targetType", "targetId", "triggerAt", "channel");
-- CreateIndex
CREATE INDEX "DeliveryLog_notificationId_idx" ON "DeliveryLog"("notificationId");
-- AddForeignKey
ALTER TABLE "Invite" ADD CONSTRAINT "Invite_creatorId_fkey" FOREIGN KEY ("creatorId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Todo" ADD CONSTRAINT "Todo_ownerId_fkey" FOREIGN KEY ("ownerId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Todo" ADD CONSTRAINT "Todo_recurrenceRuleId_fkey" FOREIGN KEY ("recurrenceRuleId") REFERENCES "RecurrenceRule"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "ReminderTask" ADD CONSTRAINT "ReminderTask_creatorId_fkey" FOREIGN KEY ("creatorId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "ReminderTask" ADD CONSTRAINT "ReminderTask_recurrenceRuleId_fkey" FOREIGN KEY ("recurrenceRuleId") REFERENCES "RecurrenceRule"("id") ON DELETE SET NULL ON UPDATE CASCADE;
-- AddForeignKey
-- NOTE: RESTRICT here means a ReminderTask cannot be deleted while recipient
-- rows exist; application code must delete recipients first.
ALTER TABLE "ReminderTaskRecipient" ADD CONSTRAINT "ReminderTaskRecipient_taskId_fkey" FOREIGN KEY ("taskId") REFERENCES "ReminderTask"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "ReminderTaskRecipient" ADD CONSTRAINT "ReminderTaskRecipient_userId_fkey" FOREIGN KEY ("userId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "Notification" ADD CONSTRAINT "Notification_recipientId_fkey" FOREIGN KEY ("recipientId") REFERENCES "User"("id") ON DELETE RESTRICT ON UPDATE CASCADE;
-- AddForeignKey
ALTER TABLE "DeliveryLog" ADD CONSTRAINT "DeliveryLog_notificationId_fkey" FOREIGN KEY ("notificationId") REFERENCES "Notification"("id") ON DELETE RESTRICT ON UPDATE CASCADE;

View File

@@ -0,0 +1,2 @@
-- AlterTable
ALTER TABLE "User" ADD COLUMN "avatar" TEXT;

View File

@@ -0,0 +1,3 @@
# Please do not edit this file manually
# It should be added in your version-control system (i.e. Git)
provider = "postgresql"

View File

@@ -0,0 +1,176 @@
generator client {
  provider = "prisma-client-js"
}

datasource db {
  provider = "postgresql"
  url      = env("DATABASE_URL")
}

// How often a recurring todo/task repeats.
enum RecurrenceType {
  hourly
  daily
  weekly
  monthly
  yearly
}

// Which entity a polymorphic (targetType, targetId) pair points at.
enum TargetType {
  todo
  reminder_task
}

// Delivery channels for notifications.
enum ChannelType {
  inapp
  bark
}

enum NotificationStatus {
  pending
  queued
  sent
  failed
}

// Account plus per-user notification preferences (barkUrl is the user's
// Bark push endpoint; inappEnabled/barkEnabled toggle each channel).
model User {
  id             String                  @id @default(cuid())
  username       String                  @unique
  passwordHash   String
  avatar         String?
  timezone       String                  @default("Asia/Shanghai")
  barkUrl        String?
  inappEnabled   Boolean                 @default(true)
  barkEnabled    Boolean                 @default(false)
  createdAt      DateTime                @default(now())
  updatedAt      DateTime                @updatedAt
  invites        Invite[]                @relation("InviteCreator")
  todos          Todo[]                  @relation("TodoOwner")
  createdTasks   ReminderTask[]          @relation("TaskCreator")
  taskRecipients ReminderTaskRecipient[]
  notifications  Notification[]          @relation("NotificationRecipient")
}

// Registration invite code, limited by maxUses/usedCount and expiresAt,
// and revocable via revokedAt.
model Invite {
  id        String    @id @default(cuid())
  code      String    @unique
  creatorId String
  maxUses   Int       @default(5)
  usedCount Int       @default(0)
  expiresAt DateTime
  revokedAt DateTime?
  createdAt DateTime  @default(now())
  creator   User      @relation("InviteCreator", fields: [creatorId], references: [id])

  @@index([creatorId])
  @@index([expiresAt])
}

// Recurrence description shared by Todo and ReminderTask.
model RecurrenceRule {
  id         String         @id @default(cuid())
  type       RecurrenceType
  interval   Int            @default(1)
  byWeekday  Int?
  byMonthday Int?
  timezone   String         @default("Asia/Shanghai")
  createdAt  DateTime       @default(now())
  updatedAt  DateTime       @updatedAt
  todos      Todo[]
  tasks      ReminderTask[]
}

// Personal todo item owned by a single user.
model Todo {
  id               String          @id @default(cuid())
  ownerId          String
  title            String
  description      String?
  dueAt            DateTime
  recurrenceRuleId String?
  createdAt        DateTime        @default(now())
  updatedAt        DateTime        @updatedAt
  owner            User            @relation("TodoOwner", fields: [ownerId], references: [id])
  recurrenceRule   RecurrenceRule? @relation(fields: [recurrenceRuleId], references: [id])

  @@index([ownerId, dueAt])
  @@index([recurrenceRuleId])
}

// Reminder created by one user and delivered to many recipients.
model ReminderTask {
  id               String                  @id @default(cuid())
  creatorId        String
  title            String
  description      String?
  dueAt            DateTime
  recurrenceRuleId String?
  createdAt        DateTime                @default(now())
  updatedAt        DateTime                @updatedAt
  creator          User                    @relation("TaskCreator", fields: [creatorId], references: [id])
  recurrenceRule   RecurrenceRule?         @relation(fields: [recurrenceRuleId], references: [id])
  recipients       ReminderTaskRecipient[]

  @@index([creatorId, dueAt])
  @@index([recurrenceRuleId])
}

// Join table linking tasks to their recipient users.
model ReminderTaskRecipient {
  taskId String
  userId String
  task   ReminderTask @relation(fields: [taskId], references: [id])
  user   User         @relation(fields: [userId], references: [id])

  @@id([taskId, userId])
  @@index([userId])
}

// Reminder lead time before dueAt. (targetType, targetId) is a polymorphic
// reference to a Todo or ReminderTask, so Prisma declares no relation here.
model ReminderOffset {
  id            String     @id @default(cuid())
  targetType    TargetType
  targetId      String
  offsetMinutes Int
  channelInapp  Boolean    @default(true)
  channelBark   Boolean    @default(false)
  createdAt     DateTime   @default(now())

  @@index([targetType, targetId])
}

// One scheduled/sent notification per recipient, target, trigger time and
// channel — the @@unique below is the dedupe guard for re-scheduling.
model Notification {
  id          String             @id @default(cuid())
  recipientId String
  targetType  TargetType
  targetId    String
  triggerAt   DateTime
  channel     ChannelType
  status      NotificationStatus @default(pending)
  lockedAt    DateTime?
  sentAt      DateTime?
  readAt      DateTime?
  createdAt   DateTime           @default(now())
  updatedAt   DateTime           @updatedAt
  recipient   User               @relation("NotificationRecipient", fields: [recipientId], references: [id])
  logs        DeliveryLog[]

  @@unique([recipientId, targetType, targetId, triggerAt, channel])
  @@index([status, triggerAt])
  @@index([recipientId, readAt])
}

// Audit trail of delivery attempts for a notification.
model DeliveryLog {
  id             String             @id @default(cuid())
  notificationId String
  attemptNo      Int
  channel        ChannelType
  status         NotificationStatus
  responseMeta   Json?
  createdAt      DateTime           @default(now())
  notification   Notification       @relation(fields: [notificationId], references: [id])

  @@index([notificationId])
}

39
backend/src/app.ts Normal file
View File

@@ -0,0 +1,39 @@
import cors from "cors";
import express from "express";
import rateLimit from "express-rate-limit";
import helmet from "helmet";
import { authRouter } from "./routes/auth";
import { inviteRouter } from "./routes/invites";
import { meRouter } from "./routes/me";
import { notificationRouter } from "./routes/notifications";
import { reminderTaskRouter } from "./routes/reminderTasks";
import { todoRouter } from "./routes/todos";
import { userRouter } from "./routes/users";
/**
 * Build and configure the Express application: security middleware, JSON
 * parsing, a global rate limit, the health probe, and all API routers.
 * Extracted as a factory so tests can create an app without binding a port.
 */
export const createApp = () => {
  const app = express();

  // Baseline hardening and body parsing.
  app.use(helmet());
  app.use(cors());
  app.use(express.json());

  // Global limit: 120 requests per client per minute.
  const limiter = rateLimit({
    windowMs: 60_000,
    max: 120,
    standardHeaders: true,
    legacyHeaders: false,
  });
  app.use(limiter);

  // Liveness probe (registered after the limiter, matching original order).
  app.get("/health", (_req, res) => res.json({ ok: true }));

  // Feature routers.
  app
    .use("/api/auth", authRouter)
    .use("/api/invites", inviteRouter)
    .use("/api/me", meRouter)
    .use("/api/notifications", notificationRouter)
    .use("/api/reminder-tasks", reminderTaskRouter)
    .use("/api/todos", todoRouter)
    .use("/api/users", userRouter);

  return app;
};

3
backend/src/db.ts Normal file
View File

@@ -0,0 +1,3 @@
import { PrismaClient } from "@prisma/client";

// Single shared Prisma client for the whole process; Prisma maintains its own
// connection pool, so one module-level instance is intentional.
export const prisma = new PrismaClient();

11
backend/src/index.ts Normal file
View File

@@ -0,0 +1,11 @@
import "dotenv/config";
import { createApp } from "./app";
// HTTP port is configurable via PORT (empty/unset falls back to 4000).
const port = Number(process.env.PORT || 4000);

createApp().listen(port, () => {
  // eslint-disable-next-line no-console
  console.log(`Notify API running on :${port}`);
});

View File

@@ -0,0 +1,21 @@
import type { Request, Response, NextFunction } from "express";
import jwt from "jsonwebtoken";
// Request augmented with the authenticated user's id.
export type AuthRequest = Request & { userId?: string };

/**
 * Express middleware requiring a valid `Authorization: Bearer <jwt>` header.
 * On success the token's userId is attached to the request; any missing,
 * malformed, or unverifiable token yields a 401.
 */
export const requireAuth = (req: AuthRequest, res: Response, next: NextFunction) => {
  const prefix = "Bearer ";
  const header = req.headers.authorization;
  if (!header || !header.startsWith(prefix)) {
    return res.status(401).json({ error: "Unauthorized" });
  }
  try {
    // NOTE(review): falls back to "dev-secret" when JWT_SECRET is unset —
    // acceptable for dev, but production must set JWT_SECRET.
    const secret = process.env.JWT_SECRET || "dev-secret";
    const { userId } = jwt.verify(header.slice(prefix.length), secret) as {
      userId: string;
    };
    req.userId = userId;
    return next();
  } catch {
    return res.status(401).json({ error: "Unauthorized" });
  }
};

View File

@@ -0,0 +1,93 @@
import { Router } from "express";
import bcrypt from "bcryptjs";
import jwt from "jsonwebtoken";
import { z } from "zod";
import { prisma } from "../db";
export const authRouter = Router();

const registerSchema = z.object({
  username: z.string().min(3),
  password: z.string().min(6),
  inviteCode: z.string().min(4),
});

/**
 * POST /api/auth/register — create an account using an invite code.
 * Invite validation, user creation, and invite usage accounting run inside
 * one transaction. Returns a 7-day JWT on success.
 */
authRouter.post("/register", async (req, res) => {
  const parsed = registerSchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ error: "Invalid payload" });
  }
  const { username, password, inviteCode } = parsed.data;
  const now = new Date();
  try {
    const result = await prisma.$transaction(async (tx) => {
      const invite = await tx.invite.findFirst({
        where: {
          code: inviteCode,
          revokedAt: null,
          expiresAt: { gt: now },
        },
      });
      if (!invite || invite.usedCount >= invite.maxUses) {
        throw new Error("Invalid invite");
      }
      const existing = await tx.user.findUnique({ where: { username } });
      if (existing) {
        throw new Error("Username taken");
      }
      const passwordHash = await bcrypt.hash(password, 10);
      const user = await tx.user.create({
        data: { username, passwordHash },
      });
      // Guarded atomic increment: the old `usedCount + 1` write was a
      // read-modify-write race that let concurrent registrations exceed
      // maxUses. The `lt` condition makes the claim atomic.
      const claimed = await tx.invite.updateMany({
        where: { id: invite.id, usedCount: { lt: invite.maxUses } },
        data: { usedCount: { increment: 1 } },
      });
      if (claimed.count === 0) {
        throw new Error("Invalid invite");
      }
      return user;
    });
    const token = jwt.sign({ userId: result.id }, process.env.JWT_SECRET || "dev-secret", {
      expiresIn: "7d",
    });
    return res.json({ token, user: { id: result.id, username: result.username } });
  } catch (err) {
    const message = err instanceof Error ? err.message : "Register failed";
    // Map known validation failures to client errors. Anything else is an
    // internal error: the old code returned 409 with the raw message, which
    // both used the wrong status and leaked internals.
    if (message === "Invalid invite") {
      return res.status(400).json({ error: message });
    }
    if (message === "Username taken") {
      return res.status(409).json({ error: message });
    }
    return res.status(500).json({ error: "Register failed" });
  }
});

const loginSchema = z.object({
  username: z.string().min(3),
  password: z.string().min(6),
});

/**
 * POST /api/auth/login — verify credentials and return a 7-day JWT.
 * Unknown username and wrong password produce the identical 401 body to
 * avoid user enumeration.
 */
authRouter.post("/login", async (req, res) => {
  const parsed = loginSchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ error: "Invalid payload" });
  }
  const { username, password } = parsed.data;
  const user = await prisma.user.findUnique({ where: { username } });
  if (!user) {
    return res.status(401).json({ error: "Invalid credentials" });
  }
  const ok = await bcrypt.compare(password, user.passwordHash);
  if (!ok) {
    return res.status(401).json({ error: "Invalid credentials" });
  }
  const token = jwt.sign({ userId: user.id }, process.env.JWT_SECRET || "dev-secret", {
    expiresIn: "7d",
  });
  return res.json({ token, user: { id: user.id, username: user.username } });
});

View File

@@ -0,0 +1,55 @@
import { randomBytes } from "crypto";
import { Router } from "express";
import { z } from "zod";
import { prisma } from "../db";
import { requireAuth, type AuthRequest } from "../middleware/auth";
export const inviteRouter = Router();
inviteRouter.use(requireAuth);
const createSchema = z.object({
maxUses: z.number().int().min(1).max(20).optional(),
expiresInDays: z.number().int().min(1).max(30).optional(),
});
inviteRouter.post("/", async (req: AuthRequest, res) => {
const parsed = createSchema.safeParse(req.body ?? {});
if (!parsed.success) {
return res.status(400).json({ error: "Invalid payload" });
}
const maxUses = parsed.data.maxUses ?? 5;
const expiresInDays = parsed.data.expiresInDays ?? 7;
const expiresAt = new Date(Date.now() + expiresInDays * 24 * 60 * 60 * 1000);
const code = `INV-${Math.random().toString(36).slice(2, 8).toUpperCase()}`;
const invite = await prisma.invite.create({
data: {
code,
creatorId: req.userId!,
maxUses,
expiresAt,
},
});
return res.json(invite);
});
inviteRouter.get("/", async (req: AuthRequest, res) => {
const invites = await prisma.invite.findMany({
where: { creatorId: req.userId! },
orderBy: { createdAt: "desc" },
});
return res.json(invites);
});
inviteRouter.post("/:id/revoke", async (req: AuthRequest, res) => {
const invite = await prisma.invite.updateMany({
where: { id: req.params.id, creatorId: req.userId! },
data: { revokedAt: new Date() },
});
if (invite.count === 0) {
return res.status(404).json({ error: "Invite not found" });
}
return res.json({ ok: true });
});

53
backend/src/routes/me.ts Normal file
View File

@@ -0,0 +1,53 @@
import { Router } from "express";
import { z } from "zod";
import { prisma } from "../db";
import { requireAuth, type AuthRequest } from "../middleware/auth";
export const meRouter = Router();
meRouter.use(requireAuth);

// Fields exposed to the client — passwordHash is deliberately excluded.
const profileSelect = {
  id: true,
  username: true,
  avatar: true,
  timezone: true,
  barkUrl: true,
  inappEnabled: true,
  barkEnabled: true,
} as const;

// GET /api/me — the current user's profile and notification settings.
meRouter.get("/", async (req: AuthRequest, res) => {
  const user = await prisma.user.findUnique({
    where: { id: req.userId! },
    select: profileSelect,
  });
  return res.json(user);
});

const settingsSchema = z.object({
  avatar: z.string().url().optional().nullable(),
  timezone: z.string().optional(),
  barkUrl: z.string().url().optional().nullable(),
  inappEnabled: z.boolean().optional(),
  barkEnabled: z.boolean().optional(),
});

// PUT /api/me/settings — partial update; only provided fields are changed.
meRouter.put("/settings", async (req: AuthRequest, res) => {
  const parsed = settingsSchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ error: "Invalid payload" });
  }
  const user = await prisma.user.update({
    where: { id: req.userId! },
    data: parsed.data,
    select: profileSelect,
  });
  return res.json(user);
});

View File

@@ -0,0 +1,40 @@
import { Router } from "express";
import { prisma } from "../db";
import { requireAuth, type AuthRequest } from "../middleware/auth";
export const notificationRouter = Router();
notificationRouter.use(requireAuth);

// GET /api/notifications?status=unread|all — caller's notifications,
// newest trigger time first.
notificationRouter.get("/", async (req: AuthRequest, res) => {
  const status = (req.query.status as string | undefined) ?? "all";
  const where: { recipientId: string; readAt?: null } = {
    recipientId: req.userId!,
  };
  if (status === "unread") {
    where.readAt = null;
  }
  const notifications = await prisma.notification.findMany({
    where,
    orderBy: { triggerAt: "desc" },
  });
  return res.json(notifications);
});

// POST /api/notifications/:id/read — mark one notification as read.
// updateMany scopes by recipientId, so users can't mark others' as read.
notificationRouter.post("/:id/read", async (req: AuthRequest, res) => {
  const { count } = await prisma.notification.updateMany({
    where: { id: req.params.id, recipientId: req.userId! },
    data: { readAt: new Date() },
  });
  if (count === 0) {
    return res.status(404).json({ error: "Not found" });
  }
  return res.json({ ok: true });
});

// POST /api/notifications/read-all — mark every unread notification as read.
notificationRouter.post("/read-all", async (req: AuthRequest, res) => {
  await prisma.notification.updateMany({
    where: { recipientId: req.userId!, readAt: null },
    data: { readAt: new Date() },
  });
  return res.json({ ok: true });
});

View File

@@ -0,0 +1,213 @@
import { Router } from "express";
import { z } from "zod";
import { prisma } from "../db";
import { requireAuth, type AuthRequest } from "../middleware/auth";
export const reminderTaskRouter = Router();
reminderTaskRouter.use(requireAuth);

// Recurrence payload: cadence plus optional interval and weekday/monthday
// anchors; timezone defaults are applied at create time.
const recurrenceSchema = z.object({
  type: z.enum(["hourly", "daily", "weekly", "monthly", "yearly"]),
  interval: z.number().int().min(1).optional(),
  byWeekday: z.number().int().min(0).max(6).optional(),
  byMonthday: z.number().int().min(1).max(31).optional(),
  timezone: z.string().optional(),
});

// Reminder offset payload: minutes before dueAt, plus per-channel toggles.
const offsetSchema = z.object({
  offsetMinutes: z.number().int().min(0),
  channelInapp: z.boolean().optional(),
  channelBark: z.boolean().optional(),
});

// Create/update payload for a reminder task. dueAt is an ISO datetime string.
const taskSchema = z.object({
  title: z.string().min(1),
  description: z.string().optional(),
  dueAt: z.string().datetime(),
  recipientIds: z.array(z.string().min(1)),
  recurrenceRule: recurrenceSchema.optional(),
  offsets: z.array(offsetSchema).optional(),
});
// GET /api/reminder-tasks — tasks created by the caller, soonest due first.
// Offsets live in a separate polymorphic table keyed by (targetType,
// targetId), so they are fetched in one extra query and grouped per task.
reminderTaskRouter.get("/", async (req: AuthRequest, res) => {
  const tasks = await prisma.reminderTask.findMany({
    where: { creatorId: req.userId! },
    include: { recipients: true, recurrenceRule: true },
    orderBy: { dueAt: "asc" },
  });
  const allOffsets = await prisma.reminderOffset.findMany({
    where: { targetType: "reminder_task", targetId: { in: tasks.map((t) => t.id) } },
  });
  const grouped = new Map<string, typeof allOffsets>();
  for (const offset of allOffsets) {
    const bucket = grouped.get(offset.targetId);
    if (bucket) {
      bucket.push(offset);
    } else {
      grouped.set(offset.targetId, [offset]);
    }
  }
  return res.json(
    tasks.map((task) => ({ ...task, offsets: grouped.get(task.id) ?? [] }))
  );
});
// POST /api/reminder-tasks — create a task together with its optional
// recurrence rule, recipient links, and reminder offsets, atomically.
reminderTaskRouter.post("/", async (req: AuthRequest, res) => {
  const parsed = taskSchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ error: "Invalid payload" });
  }
  const { recurrenceRule, offsets = [], recipientIds, ...data } = parsed.data;
  const task = await prisma.$transaction(async (tx) => {
    let ruleId: string | undefined;
    if (recurrenceRule) {
      const rule = await tx.recurrenceRule.create({
        data: {
          type: recurrenceRule.type,
          interval: recurrenceRule.interval ?? 1,
          byWeekday: recurrenceRule.byWeekday,
          byMonthday: recurrenceRule.byMonthday,
          timezone: recurrenceRule.timezone ?? "Asia/Shanghai",
        },
      });
      ruleId = rule.id;
    }
    const created = await tx.reminderTask.create({
      data: {
        creatorId: req.userId!,
        title: data.title,
        description: data.description,
        dueAt: new Date(data.dueAt),
        recurrenceRuleId: ruleId,
      },
    });
    await tx.reminderTaskRecipient.createMany({
      data: recipientIds.map((userId) => ({ taskId: created.id, userId })),
      skipDuplicates: true,
    });
    if (offsets.length > 0) {
      await tx.reminderOffset.createMany({
        data: offsets.map((o) => ({
          targetType: "reminder_task",
          targetId: created.id,
          offsetMinutes: o.offsetMinutes,
          channelInapp: o.channelInapp ?? true,
          channelBark: o.channelBark ?? false,
        })),
      });
    }
    return created;
  });
  return res.json(task);
});
// GET /api/reminder-tasks/:id — one task (creator-scoped) with its offsets.
reminderTaskRouter.get("/:id", async (req: AuthRequest, res) => {
  const task = await prisma.reminderTask.findFirst({
    where: { id: req.params.id, creatorId: req.userId! },
    include: { recipients: true, recurrenceRule: true },
  });
  if (task === null) {
    return res.status(404).json({ error: "Not found" });
  }
  const offsets = await prisma.reminderOffset.findMany({
    where: { targetType: "reminder_task", targetId: task.id },
  });
  return res.json({ ...task, offsets });
});
// PUT /api/reminder-tasks/:id — full replacement of a task the caller owns.
// All child rows (recurrence rule, recipients, offsets) are reconciled inside
// a single transaction; the statement order below matters (rule delete relies
// on the FK's ON DELETE SET NULL before the final task update).
reminderTaskRouter.put("/:id", async (req: AuthRequest, res) => {
  const parsed = taskSchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ error: "Invalid payload" });
  }
  const { recurrenceRule, offsets = [], recipientIds, ...data } = parsed.data;
  const updated = await prisma.$transaction(async (tx) => {
    // Ownership check: 404 (via thrown error) if the task isn't the caller's.
    const existing = await tx.reminderTask.findFirst({
      where: { id: req.params.id, creatorId: req.userId! },
    });
    if (!existing) {
      throw new Error("Not found");
    }
    // Reconcile the recurrence rule: update in place, create, or delete.
    let recurrenceRuleId = existing.recurrenceRuleId;
    if (recurrenceRule) {
      if (recurrenceRuleId) {
        await tx.recurrenceRule.update({
          where: { id: recurrenceRuleId },
          data: {
            type: recurrenceRule.type,
            interval: recurrenceRule.interval ?? 1,
            byWeekday: recurrenceRule.byWeekday,
            byMonthday: recurrenceRule.byMonthday,
            timezone: recurrenceRule.timezone ?? "Asia/Shanghai",
          },
        });
      } else {
        const created = await tx.recurrenceRule.create({
          data: {
            type: recurrenceRule.type,
            interval: recurrenceRule.interval ?? 1,
            byWeekday: recurrenceRule.byWeekday,
            byMonthday: recurrenceRule.byMonthday,
            timezone: recurrenceRule.timezone ?? "Asia/Shanghai",
          },
        });
        recurrenceRuleId = created.id;
      }
    } else if (recurrenceRuleId) {
      // No rule in the payload: drop the existing one.
      await tx.recurrenceRule.delete({ where: { id: recurrenceRuleId } });
      recurrenceRuleId = null;
    }
    // Recipients and offsets are replaced wholesale (delete then recreate).
    await tx.reminderTaskRecipient.deleteMany({ where: { taskId: existing.id } });
    await tx.reminderTaskRecipient.createMany({
      data: recipientIds.map((userId) => ({ taskId: existing.id, userId })),
      skipDuplicates: true,
    });
    await tx.reminderOffset.deleteMany({
      where: { targetType: "reminder_task", targetId: existing.id },
    });
    if (offsets.length > 0) {
      await tx.reminderOffset.createMany({
        data: offsets.map((offset) => ({
          targetType: "reminder_task",
          targetId: existing.id,
          offsetMinutes: offset.offsetMinutes,
          channelInapp: offset.channelInapp ?? true,
          channelBark: offset.channelBark ?? false,
        })),
      });
    }
    return tx.reminderTask.update({
      where: { id: existing.id },
      data: {
        title: data.title,
        description: data.description,
        dueAt: new Date(data.dueAt),
        recurrenceRuleId,
      },
    });
  });
  // NOTE(review): a thrown "Not found" propagates as a 500 here — consider
  // catching it and returning 404 like the other handlers.
  return res.json(updated);
});
// DELETE /api/reminder-tasks/:id — remove a task the caller owns, plus its
// recipient links and offsets.
// Bug fix: the old code deleted the task FIRST, but the migration declares
// ReminderTaskRecipient_taskId_fkey with ON DELETE RESTRICT, so deleting a
// task that still has recipients violated the FK. Children are now removed
// before the task, inside one transaction so a partial failure can't leave
// orphaned offsets.
reminderTaskRouter.delete("/:id", async (req: AuthRequest, res) => {
  const task = await prisma.reminderTask.findFirst({
    where: { id: req.params.id, creatorId: req.userId! },
    select: { id: true },
  });
  if (!task) {
    return res.status(404).json({ error: "Not found" });
  }
  await prisma.$transaction([
    prisma.reminderTaskRecipient.deleteMany({ where: { taskId: task.id } }),
    prisma.reminderOffset.deleteMany({
      where: { targetType: "reminder_task", targetId: task.id },
    }),
    prisma.reminderTask.delete({ where: { id: task.id } }),
  ]);
  return res.json({ ok: true });
});

196
backend/src/routes/todos.ts Normal file
View File

@@ -0,0 +1,196 @@
import { Router } from "express";
import { z } from "zod";
import { prisma } from "../db";
import { requireAuth, type AuthRequest } from "../middleware/auth";
export const todoRouter = Router();
todoRouter.use(requireAuth);

// Recurrence payload: cadence plus optional interval and weekday/monthday
// anchors; timezone defaults are applied at create time.
const recurrenceSchema = z.object({
  type: z.enum(["hourly", "daily", "weekly", "monthly", "yearly"]),
  interval: z.number().int().min(1).optional(),
  byWeekday: z.number().int().min(0).max(6).optional(),
  byMonthday: z.number().int().min(1).max(31).optional(),
  timezone: z.string().optional(),
});

// Reminder offset payload: minutes before dueAt, plus per-channel toggles.
const offsetSchema = z.object({
  offsetMinutes: z.number().int().min(0),
  channelInapp: z.boolean().optional(),
  channelBark: z.boolean().optional(),
});

// Create/update payload for a todo. dueAt is an ISO datetime string.
const todoSchema = z.object({
  title: z.string().min(1),
  description: z.string().optional(),
  dueAt: z.string().datetime(),
  recurrenceRule: recurrenceSchema.optional(),
  offsets: z.array(offsetSchema).optional(),
});
// GET /api/todos — the caller's todos, soonest due first. Offsets live in a
// separate polymorphic table, so they are fetched once and grouped per todo.
todoRouter.get("/", async (req: AuthRequest, res) => {
  const todos = await prisma.todo.findMany({
    where: { ownerId: req.userId! },
    include: { recurrenceRule: true },
    orderBy: { dueAt: "asc" },
  });
  const allOffsets = await prisma.reminderOffset.findMany({
    where: { targetType: "todo", targetId: { in: todos.map((t) => t.id) } },
  });
  const grouped = new Map<string, typeof allOffsets>();
  for (const offset of allOffsets) {
    const bucket = grouped.get(offset.targetId);
    if (bucket) {
      bucket.push(offset);
    } else {
      grouped.set(offset.targetId, [offset]);
    }
  }
  return res.json(
    todos.map((todo) => ({ ...todo, offsets: grouped.get(todo.id) ?? [] }))
  );
});
todoRouter.post("/", async (req: AuthRequest, res) => {
  // Create a todo for the caller, with an optional recurrence rule and an
  // optional set of reminder offsets, atomically.
  const parsed = todoSchema.safeParse(req.body);
  if (!parsed.success) {
    // Surface field-level issues so clients can show actionable messages
    // instead of just the opaque "Invalid payload".
    return res
      .status(400)
      .json({ error: "Invalid payload", details: parsed.error.flatten() });
  }
  const { recurrenceRule, offsets = [], ...data } = parsed.data;
  const todo = await prisma.$transaction(async (tx) => {
    // The recurrence rule row must exist first so the todo can reference it.
    const rule = recurrenceRule
      ? await tx.recurrenceRule.create({
          data: {
            type: recurrenceRule.type,
            interval: recurrenceRule.interval ?? 1,
            byWeekday: recurrenceRule.byWeekday,
            byMonthday: recurrenceRule.byMonthday,
            timezone: recurrenceRule.timezone ?? "Asia/Shanghai",
          },
        })
      : null;
    const created = await tx.todo.create({
      data: {
        ownerId: req.userId!,
        title: data.title,
        description: data.description,
        dueAt: new Date(data.dueAt),
        recurrenceRuleId: rule?.id,
      },
    });
    if (offsets.length > 0) {
      await tx.reminderOffset.createMany({
        data: offsets.map((offset) => ({
          targetType: "todo",
          targetId: created.id,
          offsetMinutes: offset.offsetMinutes,
          // In-app defaults on, Bark defaults off.
          channelInapp: offset.channelInapp ?? true,
          channelBark: offset.channelBark ?? false,
        })),
      });
    }
    return created;
  });
  return res.json(todo);
});
todoRouter.get("/:id", async (req: AuthRequest, res) => {
  // Fetch a single todo owned by the caller, including its recurrence rule
  // and the reminder offsets kept in the polymorphic offset table.
  const ownedTodo = await prisma.todo.findFirst({
    include: { recurrenceRule: true },
    where: { ownerId: req.userId!, id: req.params.id },
  });
  if (ownedTodo === null) {
    return res.status(404).json({ error: "Not found" });
  }
  const reminderOffsets = await prisma.reminderOffset.findMany({
    where: { targetId: ownedTodo.id, targetType: "todo" },
  });
  return res.json({ ...ownedTodo, offsets: reminderOffsets });
});
todoRouter.put("/:id", async (req: AuthRequest, res) => {
  // Replace a todo's fields, recurrence rule, and reminder offsets in one
  // transaction. A missing/foreign todo now yields a clean 404: the previous
  // version threw inside $transaction, which surfaced as an unhandled 500.
  const parsed = todoSchema.safeParse(req.body);
  if (!parsed.success) {
    return res.status(400).json({ error: "Invalid payload" });
  }
  const { recurrenceRule, offsets = [], ...data } = parsed.data;
  const updated = await prisma.$transaction(async (tx) => {
    const existing = await tx.todo.findFirst({
      where: { id: req.params.id, ownerId: req.userId! },
    });
    if (!existing) {
      // Signal "not found" to the handler instead of throwing.
      return null;
    }
    let recurrenceRuleId = existing.recurrenceRuleId;
    if (recurrenceRule) {
      // Update the existing rule in place, or create one if the todo had none.
      if (recurrenceRuleId) {
        await tx.recurrenceRule.update({
          where: { id: recurrenceRuleId },
          data: {
            type: recurrenceRule.type,
            interval: recurrenceRule.interval ?? 1,
            byWeekday: recurrenceRule.byWeekday,
            byMonthday: recurrenceRule.byMonthday,
            timezone: recurrenceRule.timezone ?? "Asia/Shanghai",
          },
        });
      } else {
        const created = await tx.recurrenceRule.create({
          data: {
            type: recurrenceRule.type,
            interval: recurrenceRule.interval ?? 1,
            byWeekday: recurrenceRule.byWeekday,
            byMonthday: recurrenceRule.byMonthday,
            timezone: recurrenceRule.timezone ?? "Asia/Shanghai",
          },
        });
        recurrenceRuleId = created.id;
      }
    } else if (recurrenceRuleId) {
      // Client removed the recurrence: drop the now-orphaned rule row.
      await tx.recurrenceRule.delete({ where: { id: recurrenceRuleId } });
      recurrenceRuleId = null;
    }
    // Offsets are replaced wholesale (delete-then-recreate keeps this simple).
    await tx.reminderOffset.deleteMany({ where: { targetType: "todo", targetId: existing.id } });
    if (offsets.length > 0) {
      await tx.reminderOffset.createMany({
        data: offsets.map((offset) => ({
          targetType: "todo",
          targetId: existing.id,
          offsetMinutes: offset.offsetMinutes,
          channelInapp: offset.channelInapp ?? true,
          channelBark: offset.channelBark ?? false,
        })),
      });
    }
    return tx.todo.update({
      where: { id: existing.id },
      data: {
        title: data.title,
        description: data.description,
        dueAt: new Date(data.dueAt),
        recurrenceRuleId,
      },
    });
  });
  if (!updated) {
    return res.status(404).json({ error: "Not found" });
  }
  return res.json(updated);
});
todoRouter.delete("/:id", async (req: AuthRequest, res) => {
  // Delete a todo owned by the caller along with its reminder offsets.
  // Both deletions run in one transaction so a failure after the todo is
  // removed cannot strand orphan offset rows (previously they were separate).
  const removed = await prisma.$transaction(async (tx) => {
    const deleted = await tx.todo.deleteMany({
      where: { id: req.params.id, ownerId: req.userId! },
    });
    if (deleted.count === 0) {
      return false;
    }
    await tx.reminderOffset.deleteMany({ where: { targetType: "todo", targetId: req.params.id } });
    return true;
  });
  if (!removed) {
    return res.status(404).json({ error: "Not found" });
  }
  return res.json({ ok: true });
});

View File

@@ -0,0 +1,24 @@
import { Router } from "express";
import { prisma } from "../db";
import { requireAuth } from "../middleware/auth";
export const userRouter = Router();
userRouter.use(requireAuth);
userRouter.get("/", async (req, res) => {
  // List users (e.g. for recipient pickers). An optional ?query= filters by
  // case-insensitive username substring; only public fields are returned.
  const search = (req.query.query as string | undefined)?.trim();
  const usernameFilter = search
    ? { username: { contains: search, mode: "insensitive" as const } }
    : undefined;
  const users = await prisma.user.findMany({
    where: usernameFilter,
    select: { id: true, username: true, avatar: true },
    orderBy: { username: "asc" },
  });
  return res.json(users);
});

View File

@@ -0,0 +1,23 @@
type BarkPayload = {
  title: string;
  body: string;
  group?: string;
  url?: string;
  sound?: string;
  icon?: string;
};

/**
 * POST a push notification to a Bark server.
 *
 * @param baseUrl Full Bark endpoint URL for the recipient's device.
 * @param payload Bark JSON body; only `title` and `body` are required.
 * @returns Parsed JSON response from the server ({} when unparseable).
 * @throws Error carrying a `response` property on non-2xx replies, or an
 *         abort error when the request exceeds the timeout.
 */
export const sendBarkPush = async (baseUrl: string, payload: BarkPayload) => {
  const response = await fetch(baseUrl, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify(payload),
    // Fail fast instead of letting an unresponsive server stall the caller.
    // NOTE(review): AbortSignal.timeout requires Node >= 17.3 — confirm runtime.
    signal: AbortSignal.timeout(10_000),
  });
  // Tolerate empty or non-JSON bodies from the server.
  const data = await response.json().catch(() => ({}));
  if (!response.ok) {
    const error = new Error(`Bark error: ${response.status}`);
    (error as Error & { response?: unknown }).response = data;
    throw error;
  }
  return data;
};

View File

@@ -0,0 +1,51 @@
import type { RecurrenceRule } from "@prisma/client";
// Clamp a day-of-month into the given UTC month (monthIndex is 0-based):
// day 0 of the following month is the last day of `monthIndex`.
const clampDay = (year: number, monthIndex: number, day: number) => {
  const monthLength = new Date(Date.UTC(year, monthIndex + 1, 0)).getUTCDate();
  return day > monthLength ? monthLength : day;
};

// Add `months` to a UTC date, clamping the day (Jan 31 + 1 month -> Feb 28/29).
// Time-of-day is preserved to second precision (milliseconds are dropped).
export const addMonthsWithClamp = (date: Date, months: number) => {
  const rawMonth = date.getUTCMonth() + months;
  const targetYear = date.getUTCFullYear() + Math.floor(rawMonth / 12);
  const targetMonth = ((rawMonth % 12) + 12) % 12;
  const day = clampDay(targetYear, targetMonth, date.getUTCDate());
  return new Date(
    Date.UTC(
      targetYear,
      targetMonth,
      day,
      date.getUTCHours(),
      date.getUTCMinutes(),
      date.getUTCSeconds()
    )
  );
};

// Add `years` to a UTC date, clamping Feb 29 to Feb 28 in non-leap years.
export const addYearsWithClamp = (date: Date, years: number) => {
  const targetYear = date.getUTCFullYear() + years;
  const day = clampDay(targetYear, date.getUTCMonth(), date.getUTCDate());
  return new Date(
    Date.UTC(
      targetYear,
      date.getUTCMonth(),
      day,
      date.getUTCHours(),
      date.getUTCMinutes(),
      date.getUTCSeconds()
    )
  );
};

// Compute the next occurrence after `dueAt` for a recurrence rule.
// NOTE(review): byWeekday/byMonthday and timezone on the rule are not applied
// here — the rule advances purely by interval. Confirm this is intended.
export const nextDueAt = (dueAt: Date, rule: RecurrenceRule) => {
  const every = rule.interval ?? 1;
  const HOUR_MS = 60 * 60 * 1000;
  if (rule.type === "hourly") {
    return new Date(dueAt.getTime() + every * HOUR_MS);
  }
  if (rule.type === "daily") {
    return new Date(dueAt.getTime() + every * 24 * HOUR_MS);
  }
  if (rule.type === "weekly") {
    return new Date(dueAt.getTime() + every * 7 * 24 * HOUR_MS);
  }
  if (rule.type === "monthly") {
    return addMonthsWithClamp(dueAt, every);
  }
  if (rule.type === "yearly") {
    return addYearsWithClamp(dueAt, every);
  }
  return dueAt;
};

255
backend/src/worker.ts Normal file
View File

@@ -0,0 +1,255 @@
import "dotenv/config";
import { prisma } from "./db";
import { sendBarkPush } from "./services/bark";
import { nextDueAt } from "./services/recurrence";
// A delivery lock older than this is treated as abandoned by a dead worker.
const LOCK_TIMEOUT_MS = 5 * 60 * 1000;

// Escalating retry delays: 1m, 5m, 15m, 1h, then 4h for every later attempt.
const calcBackoffMs = (attemptNo: number) => {
  const steps = [60_000, 5 * 60_000, 15 * 60_000, 60 * 60_000, 4 * 60 * 60_000];
  // Clamp into the table's bounds so an out-of-range attempt number (e.g. 0)
  // can never index past the array and yield `undefined`.
  const index = Math.min(Math.max(attemptNo - 1, 0), steps.length - 1);
  return steps[index];
};
// Reminder targets with no configured offsets fall back to a single
// at-due-time in-app reminder.
const buildOffsets = (offsets: { offsetMinutes: number; channelInapp: boolean; channelBark: boolean }[]) => {
  const fallback = [{ offsetMinutes: 0, channelInapp: true, channelBark: false }];
  return offsets.length > 0 ? offsets : fallback;
};
// Materialize notification rows for todos and reminder tasks whose trigger
// time (dueAt - offset) has arrived, then advance recurring items past `now`.
// Idempotency across ticks comes from `createMany({ skipDuplicates: true })` —
// NOTE(review): this presumes a unique constraint covering
// (recipientId, targetType, targetId, triggerAt, channel); confirm in schema.
const generateNotifications = async (now: Date) => {
  // Look ahead by the largest offset in the system so every reminder that
  // could already be due is in scope of the queries below.
  const maxOffset = await prisma.reminderOffset.aggregate({
    _max: { offsetMinutes: true },
  });
  const offsetMinutes = maxOffset._max.offsetMinutes ?? 0;
  const upperBound = new Date(now.getTime() + offsetMinutes * 60 * 1000);
  // --- Todos: single-owner reminders ---
  const todos = await prisma.todo.findMany({
    where: { dueAt: { lte: upperBound } },
    include: { owner: true, recurrenceRule: true },
  });
  const todoOffsets = await prisma.reminderOffset.findMany({
    where: { targetType: "todo", targetId: { in: todos.map((todo) => todo.id) } },
  });
  // Group offsets by their owning todo id.
  const todoOffsetsById = todoOffsets.reduce<Record<string, typeof todoOffsets>>((acc, offset) => {
    acc[offset.targetId] = acc[offset.targetId] ?? [];
    acc[offset.targetId].push(offset);
    return acc;
  }, {});
  for (const todo of todos) {
    // buildOffsets supplies a default at-due-time in-app offset when none exist.
    const offsets = buildOffsets(todoOffsetsById[todo.id] ?? []);
    const data = [];
    for (const offset of offsets) {
      const triggerAt = new Date(todo.dueAt.getTime() - offset.offsetMinutes * 60 * 1000);
      if (triggerAt > now) continue; // this offset is not due yet
      // One notification row per enabled channel, honoring owner settings.
      if (offset.channelInapp && todo.owner.inappEnabled) {
        data.push({
          recipientId: todo.ownerId,
          targetType: "todo" as const,
          targetId: todo.id,
          triggerAt,
          channel: "inapp" as const,
        });
      }
      if (offset.channelBark && todo.owner.barkEnabled && todo.owner.barkUrl) {
        data.push({
          recipientId: todo.ownerId,
          targetType: "todo" as const,
          targetId: todo.id,
          triggerAt,
          channel: "bark" as const,
        });
      }
    }
    if (data.length > 0) {
      await prisma.notification.createMany({ data, skipDuplicates: true });
    }
    // Recurring todo past its due time: advance dueAt to the next occurrence.
    if (todo.recurrenceRule && todo.dueAt <= now) {
      const next = nextDueAt(todo.dueAt, todo.recurrenceRule);
      await prisma.todo.update({ where: { id: todo.id }, data: { dueAt: next } });
    }
  }
  // --- Reminder tasks: multi-recipient reminders ---
  const tasks = await prisma.reminderTask.findMany({
    where: { dueAt: { lte: upperBound } },
    include: {
      recurrenceRule: true,
      recipients: { include: { user: true } },
    },
  });
  const taskOffsets = await prisma.reminderOffset.findMany({
    where: { targetType: "reminder_task", targetId: { in: tasks.map((task) => task.id) } },
  });
  // Group offsets by their owning task id.
  const taskOffsetsById = taskOffsets.reduce<Record<string, typeof taskOffsets>>((acc, offset) => {
    acc[offset.targetId] = acc[offset.targetId] ?? [];
    acc[offset.targetId].push(offset);
    return acc;
  }, {});
  for (const task of tasks) {
    const offsets = buildOffsets(taskOffsetsById[task.id] ?? []);
    const data = [];
    for (const offset of offsets) {
      const triggerAt = new Date(task.dueAt.getTime() - offset.offsetMinutes * 60 * 1000);
      if (triggerAt > now) continue;
      // Fan out one row per recipient per enabled channel.
      for (const recipient of task.recipients) {
        const user = recipient.user;
        if (offset.channelInapp && user.inappEnabled) {
          data.push({
            recipientId: user.id,
            targetType: "reminder_task" as const,
            targetId: task.id,
            triggerAt,
            channel: "inapp" as const,
          });
        }
        if (offset.channelBark && user.barkEnabled && user.barkUrl) {
          data.push({
            recipientId: user.id,
            targetType: "reminder_task" as const,
            targetId: task.id,
            triggerAt,
            channel: "bark" as const,
          });
        }
      }
    }
    if (data.length > 0) {
      await prisma.notification.createMany({ data, skipDuplicates: true });
    }
    if (task.recurrenceRule && task.dueAt <= now) {
      const next = nextDueAt(task.dueAt, task.recurrenceRule);
      await prisma.reminderTask.update({ where: { id: task.id }, data: { dueAt: next } });
    }
  }
};
// Claim due notification rows and deliver them over their channel, with
// per-row optimistic locking so multiple workers can run concurrently.
const deliverNotifications = async (now: Date) => {
  // Locks older than this are treated as abandoned by a crashed worker.
  const expiredLock = new Date(now.getTime() - LOCK_TIMEOUT_MS);
  const pending = await prisma.notification.findMany({
    where: {
      triggerAt: { lte: now },
      OR: [
        // Unclaimed rows, or rows whose "pending" lock has expired.
        { status: "pending", OR: [{ lockedAt: null }, { lockedAt: { lt: expiredLock } }] },
        // Recovery: rows left in "queued" by a worker that died after claiming
        // them. The previous query filtered on status "pending" only, so such
        // rows were stuck forever and the lock timeout was dead code.
        { status: "queued", lockedAt: { lt: expiredLock } },
      ],
    },
    include: { recipient: true },
    take: 50, // bounded batch per tick
  });
  for (const notification of pending) {
    // Optimistic compare-and-set claim: matching on the previously observed
    // status AND lockedAt guarantees only one worker wins this row.
    const locked = await prisma.notification.updateMany({
      where: { id: notification.id, status: notification.status, lockedAt: notification.lockedAt },
      data: { status: "queued", lockedAt: now },
    });
    if (locked.count === 0) continue;
    try {
      if (notification.channel === "inapp") {
        // In-app delivery is just marking the row sent; the UI reads the table.
        await prisma.notification.update({
          where: { id: notification.id },
          data: { status: "sent", sentAt: now, lockedAt: null },
        });
        await prisma.deliveryLog.create({
          data: {
            notificationId: notification.id,
            attemptNo: 1,
            channel: "inapp",
            status: "sent",
          },
        });
        continue;
      }
      // Bark channel requires the recipient to have push enabled and a URL.
      const recipient = notification.recipient;
      if (!recipient.barkEnabled || !recipient.barkUrl) {
        await prisma.notification.update({
          where: { id: notification.id },
          data: { status: "failed", lockedAt: null },
        });
        await prisma.deliveryLog.create({
          data: {
            notificationId: notification.id,
            attemptNo: 1,
            channel: "bark",
            status: "failed",
            responseMeta: { reason: "bark_disabled" },
          },
        });
        continue;
      }
      const title = "Notify 提醒";
      const body = `触发时间:${notification.triggerAt.toISOString()}`;
      await sendBarkPush(recipient.barkUrl, {
        title,
        body,
        group: "notify",
      });
      await prisma.notification.update({
        where: { id: notification.id },
        data: { status: "sent", sentAt: now, lockedAt: null },
      });
      await prisma.deliveryLog.create({
        data: {
          notificationId: notification.id,
          attemptNo: 1,
          channel: "bark",
          status: "sent",
        },
      });
    } catch (error) {
      // Derive the attempt number from the delivery log and retry with
      // backoff (max 5 attempts) by pushing triggerAt into the future.
      const logs = await prisma.deliveryLog.findMany({
        where: { notificationId: notification.id },
        orderBy: { attemptNo: "desc" },
        take: 1,
      });
      const attemptNo = (logs[0]?.attemptNo ?? 0) + 1;
      const shouldRetry = attemptNo < 5;
      const retryAt = new Date(now.getTime() + calcBackoffMs(attemptNo));
      await prisma.notification.update({
        where: { id: notification.id },
        data: {
          status: shouldRetry ? "pending" : "failed",
          lockedAt: null,
          triggerAt: shouldRetry ? retryAt : notification.triggerAt,
        },
      });
      await prisma.deliveryLog.create({
        data: {
          notificationId: notification.id,
          attemptNo,
          channel: notification.channel,
          status: shouldRetry ? "pending" : "failed",
          responseMeta: { message: (error as Error).message },
        },
      });
    }
  }
};
// One scheduler pass: materialize due notifications, then deliver them.
const loop = async () => {
  const now = new Date();
  await generateNotifications(now);
  await deliverNotifications(now);
};

const start = async () => {
  // eslint-disable-next-line no-console
  console.log("Notify worker started");
  // First pass runs eagerly; a failure here is fatal (handled by the catch below).
  await loop();
  // Later ticks must not become unhandled promise rejections: previously
  // `setInterval(loop, …)` let a single transient DB/network error escape as
  // an unhandled rejection. Log and keep ticking instead.
  setInterval(() => {
    loop().catch((error) => {
      // eslint-disable-next-line no-console
      console.error("Worker tick error", error);
    });
  }, 30 * 1000);
};

start().catch((error) => {
  // eslint-disable-next-line no-console
  console.error("Worker error", error);
  process.exit(1);
});

12
backend/tsconfig.json Normal file
View File

@@ -0,0 +1,12 @@
{
"compilerOptions": {
"target": "ES2022",
"module": "CommonJS",
"outDir": "dist",
"rootDir": "src",
"strict": true,
"esModuleInterop": true,
"skipLibCheck": true
},
"include": ["src/**/*.ts"]
}

1
backend_rust/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
/target

4444
backend_rust/Cargo.lock generated Normal file

File diff suppressed because it is too large Load Diff

34
backend_rust/Cargo.toml Normal file
View File

@@ -0,0 +1,34 @@
[workspace]
members = [".", "migration"]
resolver = "2"
[package]
name = "backend_rust"
version = "0.1.0"
edition = "2024"
[dependencies]
tokio = { version = "1.49.0", features = ["full"] }
actix-web = "4"
serde = { version = "1", features = ["derive"] }
serde_json = "1"
chrono = { version = "0", features = ["serde"] }
sea-orm = { version = "2.0.0-rc", features = ["runtime-tokio-rustls", "sqlx-postgres", "macros", "with-chrono", "with-uuid", "with-json"] }
tracing = "0.1"
tracing-subscriber = "0.3"
anyhow = "1"
thiserror = "1"
bcrypt = "0.17"
jsonwebtoken = "9"
uuid = { version = "1", features = ["v4", "serde"] }
validator = { version = "0.20", features = ["derive"] }
actix-web-httpauth = "0.8"
rand = "0.9"
reqwest = { version = "0.12", features = ["json"] }
urlencoding = "2"
actix-cors = "0.7.1"
actix-files = "0.6"
actix-multipart = "0.7"
futures-util = "0.3"
sea-orm-migration = "2.0.0-rc"
migration = { path = "./migration" }

46
backend_rust/Dockerfile Normal file
View File

@@ -0,0 +1,46 @@
# Build stage
# NOTE: Cargo.toml declares `edition = "2024"`, which requires Rust 1.85+;
# the previous rust:1.84 base image cannot compile this workspace.
FROM rust:1.85-alpine AS builder

RUN apk add --no-cache musl-dev pkgconfig openssl-dev

WORKDIR /app

# Copy manifests first so the dependency build is cached independently of
# source changes.
COPY Cargo.toml Cargo.lock ./
COPY migration/Cargo.toml ./migration/

# Create dummy sources so cargo can resolve both workspace members.
RUN mkdir src && echo "fn main() {}" > src/main.rs
RUN mkdir -p migration/src && echo "fn main() {}" > migration/src/main.rs && echo "" > migration/src/lib.rs

# Build dependencies only (cached layer).
RUN cargo build --release

# Remove dummy files
RUN rm -rf src migration/src

# Copy actual source code
COPY src ./src
COPY migration/src ./migration/src

# Touch sources so cargo rebuilds the workspace crates, not just the deps.
RUN touch src/main.rs migration/src/main.rs migration/src/lib.rs
RUN cargo build --release

# Runtime stage
FROM alpine:3.21
RUN apk add --no-cache ca-certificates libgcc
WORKDIR /app

# Copy the binaries from the builder.
COPY --from=builder /app/target/release/backend_rust /app/backend_rust
COPY --from=builder /app/target/release/migration /app/migration

# Avatar uploads are written under this directory.
RUN mkdir -p /app/uploads/avatars

EXPOSE 4000
CMD ["/app/backend_rust"]

0
backend_rust/README.md Normal file
View File

View File

@@ -0,0 +1,22 @@
[package]
edition = "2024"
name = "migration"
publish = false
version = "0.1.0"
[lib]
name = "migration"
path = "src/lib.rs"
[dependencies]
tokio = { version = "1", features = ["macros", "rt", "rt-multi-thread"] }
[dependencies.sea-orm-migration]
features = [
# Enable at least one `ASYNC_RUNTIME` and `DATABASE_DRIVER` feature if you want to run migration via CLI.
# View the list of supported features at https://www.sea-ql.org/SeaORM/docs/install-and-config/database-and-async-runtime.
# e.g.
"runtime-tokio-rustls",
"sqlx-postgres",
]
version = "~2.0.0-rc"

View File

@@ -0,0 +1,47 @@
# Running Migrator CLI
- Generate a new migration file
```sh
cargo run -- generate MIGRATION_NAME
```
- Apply all pending migrations

```sh
cargo run
```

or, equivalently:

```sh
cargo run -- up
```
- Apply first 10 pending migrations
```sh
cargo run -- up -n 10
```
- Rollback last applied migrations
```sh
cargo run -- down
```
- Rollback last 10 applied migrations
```sh
cargo run -- down -n 10
```
- Drop all tables from the database, then reapply all migrations
```sh
cargo run -- fresh
```
- Rollback all applied migrations, then reapply all migrations
```sh
cargo run -- refresh
```
- Rollback all applied migrations
```sh
cargo run -- reset
```
- Check the status of all migrations
```sh
cargo run -- status
```
- Generate entity from sea-orm-cli
```sh
sea-orm-cli generate entity --database-url postgres://<user>:<password>@<host>/notify --output-dir ./src/entity --entity-format dense
```

View File

@@ -0,0 +1,42 @@
pub use sea_orm_migration::prelude::*;

// One module per schema step, ordered by the timestamp in the filename.
mod m20220101_000001_create_user;
mod m20220101_000002_create_enums;
mod m20220101_000003_create_invite;
mod m20220101_000004_create_recurrence_rule;
mod m20220101_000005_create_todo;
mod m20220101_000006_create_reminder_task;
mod m20220101_000007_create_reminder_task_recipient;
mod m20220101_000008_create_reminder_offset;
mod m20220101_000009_create_notification;
mod m20220101_000010_create_delivery_log;
mod m20260128_000011_modify_todo;
mod m20260129_000012_add_bark_params;
mod m20260129_000013_add_notification_offset_id;
mod m20260129_000014_convert_timestamps_to_timestamptz;
mod m20260129_000015_add_user_invite_id;

pub struct Migrator;

#[async_trait::async_trait]
impl MigratorTrait for Migrator {
    /// All migrations in apply order. Order matters: later migrations assume
    /// the tables and enum types created by earlier ones.
    fn migrations() -> Vec<Box<dyn MigrationTrait>> {
        vec![
            Box::new(m20220101_000001_create_user::Migration),
            Box::new(m20220101_000002_create_enums::Migration),
            Box::new(m20220101_000003_create_invite::Migration),
            Box::new(m20220101_000004_create_recurrence_rule::Migration),
            Box::new(m20220101_000005_create_todo::Migration),
            Box::new(m20220101_000006_create_reminder_task::Migration),
            Box::new(m20220101_000007_create_reminder_task_recipient::Migration),
            Box::new(m20220101_000008_create_reminder_offset::Migration),
            Box::new(m20220101_000009_create_notification::Migration),
            Box::new(m20220101_000010_create_delivery_log::Migration),
            Box::new(m20260128_000011_modify_todo::Migration),
            Box::new(m20260129_000012_add_bark_params::Migration),
            Box::new(m20260129_000013_add_notification_offset_id::Migration),
            Box::new(m20260129_000014_convert_timestamps_to_timestamptz::Migration),
            Box::new(m20260129_000015_add_user_invite_id::Migration),
        ]
    }
}

View File

@@ -0,0 +1,74 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the user table (identifier rendered by `DeriveIden`).
    /// Columns use plain `timestamp`; a later migration
    /// (m20260129_000014) converts them to timestamptz.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(User::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(User::Id).uuid().primary_key().not_null())
                    .col(
                        ColumnDef::new(User::Username)
                            .string()
                            .unique_key()
                            .not_null(),
                    )
                    .col(ColumnDef::new(User::PasswordHash).string().not_null())
                    .col(ColumnDef::new(User::Avatar).string().null())
                    .col(
                        ColumnDef::new(User::Timezone)
                            .string()
                            .not_null()
                            .default("Asia/Shanghai"),
                    )
                    .col(ColumnDef::new(User::BarkUrl).string().null())
                    // Per-user channel toggles: in-app on, Bark off by default.
                    .col(
                        ColumnDef::new(User::InappEnabled)
                            .boolean()
                            .not_null()
                            .default(true),
                    )
                    .col(
                        ColumnDef::new(User::BarkEnabled)
                            .boolean()
                            .not_null()
                            .default(false),
                    )
                    .col(
                        ColumnDef::new(User::CreatedAt)
                            .timestamp()
                            .not_null()
                            .extra("DEFAULT NOW()"),
                    )
                    .col(ColumnDef::new(User::UpdatedAt).timestamp().not_null())
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Use the same `User::Table` identifier as `up`. The previous literal
        // "User" named a different (non-existent) table, since `DeriveIden`
        // renders this enum's table identifier in snake case.
        manager
            .drop_table(Table::drop().table(User::Table).to_owned())
            .await
    }
}

#[derive(DeriveIden)]
enum User {
    Table,
    Id,
    Username,
    PasswordHash,
    Avatar,
    Timezone,
    BarkUrl,
    InappEnabled,
    BarkEnabled,
    CreatedAt,
    UpdatedAt,
}

View File

@@ -0,0 +1,137 @@
use sea_orm_migration::prelude::*;
use sea_query::extension::postgres::Type;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the four Postgres enum types shared by later table migrations.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Create RecurrenceType enum
        manager
            .create_type(
                Type::create()
                    .as_enum(RecurrenceType::Type)
                    .values([
                        RecurrenceType::Hourly,
                        RecurrenceType::Daily,
                        RecurrenceType::Weekly,
                        RecurrenceType::Monthly,
                        RecurrenceType::Yearly,
                    ])
                    .to_owned(),
            )
            .await?;
        // Create TargetType enum
        manager
            .create_type(
                Type::create()
                    .as_enum(TargetType::Type)
                    .values([TargetType::Todo, TargetType::ReminderTask])
                    .to_owned(),
            )
            .await?;
        // Create ChannelType enum
        manager
            .create_type(
                Type::create()
                    .as_enum(ChannelType::Type)
                    .values([ChannelType::Inapp, ChannelType::Bark])
                    .to_owned(),
            )
            .await?;
        // Create NotificationStatus enum
        manager
            .create_type(
                Type::create()
                    .as_enum(NotificationStatus::Type)
                    .values([
                        NotificationStatus::Pending,
                        NotificationStatus::Queued,
                        NotificationStatus::Sent,
                        NotificationStatus::Failed,
                    ])
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Drop the enum types in reverse creation order.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_type(Type::drop().name(NotificationStatus::Type).to_owned())
            .await?;
        manager
            .drop_type(Type::drop().name(ChannelType::Type).to_owned())
            .await?;
        manager
            .drop_type(Type::drop().name(TargetType::Type).to_owned())
            .await?;
        manager
            .drop_type(Type::drop().name(RecurrenceType::Type).to_owned())
            .await?;
        Ok(())
    }
}

// RecurrenceType enum — `Type` is the SQL type name; the rest are its values.
#[derive(DeriveIden)]
pub enum RecurrenceType {
    #[sea_orm(iden = "recurrence_type")]
    Type,
    #[sea_orm(iden = "hourly")]
    Hourly,
    #[sea_orm(iden = "daily")]
    Daily,
    #[sea_orm(iden = "weekly")]
    Weekly,
    #[sea_orm(iden = "monthly")]
    Monthly,
    #[sea_orm(iden = "yearly")]
    Yearly,
}

// TargetType enum — discriminates the polymorphic reminder_offset target.
#[derive(DeriveIden)]
pub enum TargetType {
    #[sea_orm(iden = "target_type")]
    Type,
    #[sea_orm(iden = "todo")]
    Todo,
    #[sea_orm(iden = "reminder_task")]
    ReminderTask,
}

// ChannelType enum — delivery channel for notifications.
#[derive(DeriveIden)]
pub enum ChannelType {
    #[sea_orm(iden = "channel_type")]
    Type,
    #[sea_orm(iden = "inapp")]
    Inapp,
    #[sea_orm(iden = "bark")]
    Bark,
}

// NotificationStatus enum — delivery lifecycle of a notification row.
#[derive(DeriveIden)]
pub enum NotificationStatus {
    #[sea_orm(iden = "notification_status")]
    Type,
    #[sea_orm(iden = "pending")]
    Pending,
    #[sea_orm(iden = "queued")]
    Queued,
    #[sea_orm(iden = "sent")]
    Sent,
    #[sea_orm(iden = "failed")]
    Failed,
}

View File

@@ -0,0 +1,102 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the invite table: codes with a use limit, expiry, and optional
    /// revocation, owned by the creating user.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Invite::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(Invite::Id).uuid().primary_key().not_null())
                    .col(
                        ColumnDef::new(Invite::Code)
                            .string()
                            .unique_key()
                            .not_null(),
                    )
                    .col(ColumnDef::new(Invite::CreatorId).uuid().not_null())
                    // A code may be redeemed up to MaxUses times (default 5).
                    .col(
                        ColumnDef::new(Invite::MaxUses)
                            .integer()
                            .not_null()
                            .default(5),
                    )
                    .col(
                        ColumnDef::new(Invite::UsedCount)
                            .integer()
                            .not_null()
                            .default(0),
                    )
                    .col(ColumnDef::new(Invite::ExpiresAt).timestamp().not_null())
                    // Non-null RevokedAt marks a revoked invite.
                    .col(ColumnDef::new(Invite::RevokedAt).timestamp().null())
                    .col(
                        ColumnDef::new(Invite::CreatedAt)
                            .timestamp()
                            .not_null()
                            .extra("DEFAULT NOW()"),
                    )
                    // Deleting a user cascades to the invites they created.
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_invite_creator")
                            .from(Invite::Table, Invite::CreatorId)
                            .to(User::Table, User::Id)
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;
        // Create indexes
        manager
            .create_index(
                Index::create()
                    .name("IDX_invite_creator_id")
                    .table(Invite::Table)
                    .col(Invite::CreatorId)
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_invite_expires_at")
                    .table(Invite::Table)
                    .col(Invite::ExpiresAt)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Invite::Table).to_owned())
            .await
    }
}

// Column identifiers for the invite table.
#[derive(DeriveIden)]
enum Invite {
    Table,
    Id,
    Code,
    CreatorId,
    MaxUses,
    UsedCount,
    ExpiresAt,
    RevokedAt,
    CreatedAt,
}

// Minimal identifier mirror of the user table for the FK reference.
#[derive(DeriveIden)]
enum User {
    Table,
    Id,
}

View File

@@ -0,0 +1,75 @@
use sea_orm_migration::prelude::*;

use crate::m20220101_000002_create_enums::RecurrenceType;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the recurrence_rule table; todos and reminder tasks reference
    /// rows here via a nullable FK added in their own migrations.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(RecurrenceRule::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(RecurrenceRule::Id)
                            .uuid()
                            .primary_key()
                            .not_null(),
                    )
                    // Uses the recurrence_type Postgres enum from migration 000002.
                    .col(
                        ColumnDef::new(RecurrenceRule::Type)
                            .custom(RecurrenceType::Type)
                            .not_null(),
                    )
                    // Repeat every N periods.
                    .col(
                        ColumnDef::new(RecurrenceRule::Interval)
                            .integer()
                            .not_null()
                            .default(1),
                    )
                    // Optional refinements for weekly/monthly rules.
                    .col(ColumnDef::new(RecurrenceRule::ByWeekday).integer().null())
                    .col(ColumnDef::new(RecurrenceRule::ByMonthday).integer().null())
                    .col(
                        ColumnDef::new(RecurrenceRule::Timezone)
                            .string()
                            .not_null()
                            .default("Asia/Shanghai"),
                    )
                    .col(
                        ColumnDef::new(RecurrenceRule::CreatedAt)
                            .timestamp()
                            .not_null()
                            .extra("DEFAULT NOW()"),
                    )
                    .col(
                        ColumnDef::new(RecurrenceRule::UpdatedAt)
                            .timestamp()
                            .not_null(),
                    )
                    .to_owned(),
            )
            .await
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(RecurrenceRule::Table).to_owned())
            .await
    }
}

// Column identifiers for the recurrence_rule table.
#[derive(DeriveIden)]
enum RecurrenceRule {
    Table,
    Id,
    Type,
    Interval,
    ByWeekday,
    ByMonthday,
    Timezone,
    CreatedAt,
    UpdatedAt,
}

View File

@@ -0,0 +1,102 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the todo table: a single-owner reminder item with an optional
    /// recurrence rule.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Todo::Table)
                    .if_not_exists()
                    .col(ColumnDef::new(Todo::Id).uuid().primary_key().not_null())
                    .col(ColumnDef::new(Todo::OwnerId).uuid().not_null())
                    .col(ColumnDef::new(Todo::Title).string().not_null())
                    .col(ColumnDef::new(Todo::Description).string().null())
                    .col(ColumnDef::new(Todo::DueAt).timestamp().not_null())
                    .col(ColumnDef::new(Todo::RecurrenceRuleId).uuid().null())
                    .col(
                        ColumnDef::new(Todo::CreatedAt)
                            .timestamp()
                            .not_null()
                            .extra("DEFAULT NOW()"),
                    )
                    .col(ColumnDef::new(Todo::UpdatedAt).timestamp().not_null())
                    // Deleting a user deletes their todos.
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_todo_owner")
                            .from(Todo::Table, Todo::OwnerId)
                            .to(User::Table, User::Id)
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    // Deleting a rule detaches (SET NULL) rather than deleting the todo.
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_todo_recurrence_rule")
                            .from(Todo::Table, Todo::RecurrenceRuleId)
                            .to(RecurrenceRule::Table, RecurrenceRule::Id)
                            .on_delete(ForeignKeyAction::SetNull)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;
        // Create indexes
        manager
            .create_index(
                Index::create()
                    .name("IDX_todo_owner_due")
                    .table(Todo::Table)
                    .col(Todo::OwnerId)
                    .col(Todo::DueAt)
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_todo_recurrence_rule_id")
                    .table(Todo::Table)
                    .col(Todo::RecurrenceRuleId)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Todo::Table).to_owned())
            .await
    }
}

// Column identifiers for the todo table.
#[derive(DeriveIden)]
enum Todo {
    Table,
    Id,
    OwnerId,
    Title,
    Description,
    DueAt,
    RecurrenceRuleId,
    CreatedAt,
    UpdatedAt,
}

// Identifier mirrors for FK references.
#[derive(DeriveIden)]
enum User {
    Table,
    Id,
}

#[derive(DeriveIden)]
enum RecurrenceRule {
    Table,
    Id,
}

View File

@@ -0,0 +1,111 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the reminder_task table: like a todo but created by one user
    /// and fanned out to recipients via reminder_task_recipient (migration 000007).
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(ReminderTask::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(ReminderTask::Id)
                            .uuid()
                            .primary_key()
                            .not_null(),
                    )
                    .col(ColumnDef::new(ReminderTask::CreatorId).uuid().not_null())
                    .col(ColumnDef::new(ReminderTask::Title).string().not_null())
                    .col(ColumnDef::new(ReminderTask::Description).string().null())
                    .col(ColumnDef::new(ReminderTask::DueAt).timestamp().not_null())
                    .col(ColumnDef::new(ReminderTask::RecurrenceRuleId).uuid().null())
                    .col(
                        ColumnDef::new(ReminderTask::CreatedAt)
                            .timestamp()
                            .not_null()
                            .extra("DEFAULT NOW()"),
                    )
                    .col(
                        ColumnDef::new(ReminderTask::UpdatedAt)
                            .timestamp()
                            .not_null(),
                    )
                    // Deleting a user deletes the tasks they created.
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_reminder_task_creator")
                            .from(ReminderTask::Table, ReminderTask::CreatorId)
                            .to(User::Table, User::Id)
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    // Deleting a rule detaches (SET NULL) rather than deleting the task.
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_reminder_task_recurrence_rule")
                            .from(ReminderTask::Table, ReminderTask::RecurrenceRuleId)
                            .to(RecurrenceRule::Table, RecurrenceRule::Id)
                            .on_delete(ForeignKeyAction::SetNull)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;
        // Create indexes
        manager
            .create_index(
                Index::create()
                    .name("IDX_reminder_task_creator_due")
                    .table(ReminderTask::Table)
                    .col(ReminderTask::CreatorId)
                    .col(ReminderTask::DueAt)
                    .to_owned(),
            )
            .await?;
        manager
            .create_index(
                Index::create()
                    .name("IDX_reminder_task_recurrence_rule_id")
                    .table(ReminderTask::Table)
                    .col(ReminderTask::RecurrenceRuleId)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(ReminderTask::Table).to_owned())
            .await
    }
}

// Column identifiers for the reminder_task table.
#[derive(DeriveIden)]
enum ReminderTask {
    Table,
    Id,
    CreatorId,
    Title,
    Description,
    DueAt,
    RecurrenceRuleId,
    CreatedAt,
    UpdatedAt,
}

// Identifier mirrors for FK references.
#[derive(DeriveIden)]
enum User {
    Table,
    Id,
}

#[derive(DeriveIden)]
enum RecurrenceRule {
    Table,
    Id,
}

View File

@@ -0,0 +1,87 @@
use sea_orm_migration::prelude::*;

#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the reminder_task_recipient join table (task <-> user, composite
    /// primary key), cascading away with either side.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(ReminderTaskRecipient::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(ReminderTaskRecipient::TaskId)
                            .uuid()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(ReminderTaskRecipient::UserId)
                            .uuid()
                            .not_null(),
                    )
                    // Composite PK: one link per (task, user) pair.
                    .primary_key(
                        Index::create()
                            .col(ReminderTaskRecipient::TaskId)
                            .col(ReminderTaskRecipient::UserId),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_reminder_task_recipient_task")
                            .from(ReminderTaskRecipient::Table, ReminderTaskRecipient::TaskId)
                            .to(ReminderTask::Table, ReminderTask::Id)
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_reminder_task_recipient_user")
                            .from(ReminderTaskRecipient::Table, ReminderTaskRecipient::UserId)
                            .to(User::Table, User::Id)
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;
        // Create index on user_id for reverse lookups
        manager
            .create_index(
                Index::create()
                    .name("IDX_reminder_task_recipient_user_id")
                    .table(ReminderTaskRecipient::Table)
                    .col(ReminderTaskRecipient::UserId)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(ReminderTaskRecipient::Table).to_owned())
            .await
    }
}

// Column identifiers for the join table.
#[derive(DeriveIden)]
enum ReminderTaskRecipient {
    Table,
    TaskId,
    UserId,
}

// Identifier mirrors for FK references.
#[derive(DeriveIden)]
enum ReminderTask {
    Table,
    Id,
}

#[derive(DeriveIden)]
enum User {
    Table,
    Id,
}

View File

@@ -0,0 +1,87 @@
use sea_orm_migration::prelude::*;
use crate::m20220101_000002_create_enums::TargetType;
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the `reminder_offset` table: per-target reminder offsets with
    /// per-channel delivery toggles. `target_type`/`target_id` form a
    /// polymorphic reference, so no FK constraint can be declared for them.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(ReminderOffset::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(ReminderOffset::Id)
                            .uuid()
                            .primary_key()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(ReminderOffset::TargetType)
                            .custom(TargetType::Type)
                            .not_null(),
                    )
                    .col(ColumnDef::new(ReminderOffset::TargetId).uuid().not_null())
                    // Offset in minutes relative to the target's due time
                    // (sign convention not visible in this migration).
                    .col(
                        ColumnDef::new(ReminderOffset::OffsetMinutes)
                            .integer()
                            .not_null(),
                    )
                    // In-app notifications on by default; Bark push opt-in.
                    .col(
                        ColumnDef::new(ReminderOffset::ChannelInapp)
                            .boolean()
                            .not_null()
                            .default(true),
                    )
                    .col(
                        ColumnDef::new(ReminderOffset::ChannelBark)
                            .boolean()
                            .not_null()
                            .default(false),
                    )
                    .col(
                        ColumnDef::new(ReminderOffset::CreatedAt)
                            .timestamp()
                            .not_null()
                            .extra("DEFAULT NOW()"),
                    )
                    .to_owned(),
            )
            .await?;
        // Composite index for polymorphic lookup by (target_type, target_id).
        manager
            .create_index(
                Index::create()
                    .name("IDX_reminder_offset_target")
                    .table(ReminderOffset::Table)
                    .col(ReminderOffset::TargetType)
                    .col(ReminderOffset::TargetId)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Revert: drop the `reminder_offset` table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(ReminderOffset::Table).to_owned())
            .await
    }
}

// SeaQuery identifiers for the `reminder_offset` table.
#[derive(DeriveIden)]
enum ReminderOffset {
    Table,
    Id,
    TargetType,
    TargetId,
    OffsetMinutes,
    ChannelInapp,
    ChannelBark,
    CreatedAt,
}

View File

@@ -0,0 +1,141 @@
use sea_orm_migration::prelude::*;
use crate::m20220101_000002_create_enums::{ChannelType, NotificationStatus, TargetType};
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the `notification` table: one scheduled delivery per recipient
    /// with a polymorphic target, trigger time, channel and lifecycle status,
    /// plus a dedup unique constraint and worker/inbox indexes.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(Notification::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(Notification::Id)
                            .uuid()
                            .primary_key()
                            .not_null(),
                    )
                    .col(ColumnDef::new(Notification::RecipientId).uuid().not_null())
                    // Polymorphic target (no FK possible across target tables).
                    .col(
                        ColumnDef::new(Notification::TargetType)
                            .custom(TargetType::Type)
                            .not_null(),
                    )
                    .col(ColumnDef::new(Notification::TargetId).uuid().not_null())
                    .col(
                        ColumnDef::new(Notification::TriggerAt)
                            .timestamp()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(Notification::Channel)
                            .custom(ChannelType::Type)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(Notification::Status)
                            .custom(NotificationStatus::Type)
                            .not_null()
                            .default("pending"),
                    )
                    // locked_at: presumably a worker claim marker for delivery
                    // — confirm against the worker implementation.
                    .col(ColumnDef::new(Notification::LockedAt).timestamp().null())
                    .col(ColumnDef::new(Notification::SentAt).timestamp().null())
                    .col(ColumnDef::new(Notification::ReadAt).timestamp().null())
                    .col(
                        ColumnDef::new(Notification::CreatedAt)
                            .timestamp()
                            .not_null()
                            .extra("DEFAULT NOW()"),
                    )
                    .col(
                        ColumnDef::new(Notification::UpdatedAt)
                            .timestamp()
                            .not_null(),
                    )
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_notification_recipient")
                            .from(Notification::Table, Notification::RecipientId)
                            .to(User::Table, User::Id)
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;
        // Unique constraint: at most one notification per (recipient, target,
        // trigger time, channel) — makes scheduling idempotent.
        manager
            .create_index(
                Index::create()
                    .name("UQ_notification_recipient_target_trigger_channel")
                    .table(Notification::Table)
                    .col(Notification::RecipientId)
                    .col(Notification::TargetType)
                    .col(Notification::TargetId)
                    .col(Notification::TriggerAt)
                    .col(Notification::Channel)
                    .unique()
                    .to_owned(),
            )
            .await?;
        // (status, trigger_at): scan for due pending notifications.
        manager
            .create_index(
                Index::create()
                    .name("IDX_notification_status_trigger")
                    .table(Notification::Table)
                    .col(Notification::Status)
                    .col(Notification::TriggerAt)
                    .to_owned(),
            )
            .await?;
        // (recipient_id, read_at): per-user unread-inbox queries.
        manager
            .create_index(
                Index::create()
                    .name("IDX_notification_recipient_read")
                    .table(Notification::Table)
                    .col(Notification::RecipientId)
                    .col(Notification::ReadAt)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Revert: drop the `notification` table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(Notification::Table).to_owned())
            .await
    }
}

// SeaQuery identifiers for the `notification` table.
#[derive(DeriveIden)]
enum Notification {
    Table,
    Id,
    RecipientId,
    TargetType,
    TargetId,
    TriggerAt,
    Channel,
    Status,
    LockedAt,
    SentAt,
    ReadAt,
    CreatedAt,
    UpdatedAt,
}

// FK target: existing `user` table (idents only).
#[derive(DeriveIden)]
enum User {
    Table,
    Id,
}

View File

@@ -0,0 +1,94 @@
use sea_orm_migration::prelude::*;
use crate::m20220101_000002_create_enums::{ChannelType, NotificationStatus};
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Create the `delivery_log` table: one row per delivery attempt of a
    /// notification, recording the attempt number, channel, resulting status
    /// and an optional raw provider response (JSONB).
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .create_table(
                Table::create()
                    .table(DeliveryLog::Table)
                    .if_not_exists()
                    .col(
                        ColumnDef::new(DeliveryLog::Id)
                            .uuid()
                            .primary_key()
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(DeliveryLog::NotificationId)
                            .uuid()
                            .not_null(),
                    )
                    .col(ColumnDef::new(DeliveryLog::AttemptNo).integer().not_null())
                    .col(
                        ColumnDef::new(DeliveryLog::Channel)
                            .custom(ChannelType::Type)
                            .not_null(),
                    )
                    .col(
                        ColumnDef::new(DeliveryLog::Status)
                            .custom(NotificationStatus::Type)
                            .not_null(),
                    )
                    .col(ColumnDef::new(DeliveryLog::ResponseMeta).json_binary().null())
                    .col(
                        ColumnDef::new(DeliveryLog::CreatedAt)
                            .timestamp()
                            .not_null()
                            .extra("DEFAULT NOW()"),
                    )
                    // Log rows die with their notification (cascade delete).
                    .foreign_key(
                        ForeignKey::create()
                            .name("FK_delivery_log_notification")
                            .from(DeliveryLog::Table, DeliveryLog::NotificationId)
                            .to(Notification::Table, Notification::Id)
                            .on_delete(ForeignKeyAction::Cascade)
                            .on_update(ForeignKeyAction::Cascade),
                    )
                    .to_owned(),
            )
            .await?;
        // Index for fetching all attempts of one notification.
        manager
            .create_index(
                Index::create()
                    .name("IDX_delivery_log_notification_id")
                    .table(DeliveryLog::Table)
                    .col(DeliveryLog::NotificationId)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Revert: drop the `delivery_log` table.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .drop_table(Table::drop().table(DeliveryLog::Table).to_owned())
            .await
    }
}

// SeaQuery identifiers for the `delivery_log` table.
#[derive(DeriveIden)]
enum DeliveryLog {
    Table,
    Id,
    NotificationId,
    AttemptNo,
    Channel,
    Status,
    ResponseMeta,
    CreatedAt,
}

// FK target: `notification` table (idents only).
#[derive(DeriveIden)]
enum Notification {
    Table,
    Id,
}

View File

@@ -0,0 +1,38 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Add check-in tracking columns to `todo`: `check_in_at` (nullable
    /// timestamp of the latest check-in), `check_in_count` (total, default 0)
    /// and `is_checked_in` (flag, default false). The defaults make this
    /// migration safe on existing rows.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Todo::Table)
                    .add_column(ColumnDef::new(Todo::CheckInAt).timestamp().null())
                    .add_column(
                        ColumnDef::new(Todo::CheckInCount)
                            .integer()
                            .not_null()
                            .default(0),
                    )
                    .add_column(
                        ColumnDef::new(Todo::IsCheckedIn)
                            .boolean()
                            .not_null()
                            .default(false),
                    )
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Revert: drop the three check-in columns again. The original migration
    /// omitted `down`, so rolling back would have aborted at this step with
    /// the trait's erroring default implementation.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Todo::Table)
                    .drop_column(Todo::CheckInAt)
                    .drop_column(Todo::CheckInCount)
                    .drop_column(Todo::IsCheckedIn)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }
}

// Idents for the existing `todo` table plus the new columns.
#[derive(DeriveIden)]
enum Todo {
    Table,
    CheckInAt,
    CheckInCount,
    IsCheckedIn,
}

View File

@@ -0,0 +1,49 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Add optional Bark push customization columns to `reminder_offset`
    /// (title, subtitle, markdown body, level, icon). All nullable, so the
    /// migration is safe on existing rows.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(ReminderOffset::Table)
                    .add_column(ColumnDef::new(ReminderOffset::BarkTitle).string().null())
                    .add_column(ColumnDef::new(ReminderOffset::BarkSubtitle).string().null())
                    .add_column(ColumnDef::new(ReminderOffset::BarkBodyMarkdown).text().null())
                    .add_column(ColumnDef::new(ReminderOffset::BarkLevel).string().null())
                    .add_column(ColumnDef::new(ReminderOffset::BarkIcon).string().null())
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Revert: drop the five Bark columns.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(ReminderOffset::Table)
                    .drop_column(ReminderOffset::BarkTitle)
                    .drop_column(ReminderOffset::BarkSubtitle)
                    .drop_column(ReminderOffset::BarkBodyMarkdown)
                    .drop_column(ReminderOffset::BarkLevel)
                    .drop_column(ReminderOffset::BarkIcon)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }
}

// Idents for the existing `reminder_offset` table plus the new columns.
#[derive(DeriveIden)]
enum ReminderOffset {
    Table,
    BarkTitle,
    BarkSubtitle,
    BarkBodyMarkdown,
    BarkLevel,
    BarkIcon,
}

View File

@@ -0,0 +1,37 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Link notifications back to the reminder offset that produced them by
    /// adding a nullable `notification.offset_id` column.
    /// NOTE(review): no FK to `reminder_offset` — presumably so offsets can
    /// be deleted without touching already-created notifications; confirm.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Notification::Table)
                    .add_column(ColumnDef::new(Notification::OffsetId).uuid().null())
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Revert: drop the `offset_id` column.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        manager
            .alter_table(
                Table::alter()
                    .table(Notification::Table)
                    .drop_column(Notification::OffsetId)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }
}

// Idents for the existing `notification` table plus the new column.
#[derive(DeriveIden)]
enum Notification {
    Table,
    OffsetId,
}

View File

@@ -0,0 +1,143 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Convert every timestamp column in the schema to TIMESTAMPTZ.
    /// `USING <col> AT TIME ZONE 'UTC'` reinterprets the existing naive
    /// values as UTC instants, so stored data is preserved. Raw SQL is used
    /// because the schema builder cannot express USING casts.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let db = manager.get_connection();
        // User table: created_at, updated_at
        db.execute_unprepared(
            "ALTER TABLE \"user\"
ALTER COLUMN created_at TYPE TIMESTAMP WITH TIME ZONE USING created_at AT TIME ZONE 'UTC',
ALTER COLUMN updated_at TYPE TIMESTAMP WITH TIME ZONE USING updated_at AT TIME ZONE 'UTC'"
        ).await?;
        // Invite table: expires_at, revoked_at, created_at
        db.execute_unprepared(
            "ALTER TABLE invite
ALTER COLUMN expires_at TYPE TIMESTAMP WITH TIME ZONE USING expires_at AT TIME ZONE 'UTC',
ALTER COLUMN revoked_at TYPE TIMESTAMP WITH TIME ZONE USING revoked_at AT TIME ZONE 'UTC',
ALTER COLUMN created_at TYPE TIMESTAMP WITH TIME ZONE USING created_at AT TIME ZONE 'UTC'"
        ).await?;
        // RecurrenceRule table: created_at, updated_at
        db.execute_unprepared(
            "ALTER TABLE recurrence_rule
ALTER COLUMN created_at TYPE TIMESTAMP WITH TIME ZONE USING created_at AT TIME ZONE 'UTC',
ALTER COLUMN updated_at TYPE TIMESTAMP WITH TIME ZONE USING updated_at AT TIME ZONE 'UTC'"
        ).await?;
        // Todo table: due_at, created_at, updated_at, check_in_at
        db.execute_unprepared(
            "ALTER TABLE todo
ALTER COLUMN due_at TYPE TIMESTAMP WITH TIME ZONE USING due_at AT TIME ZONE 'UTC',
ALTER COLUMN created_at TYPE TIMESTAMP WITH TIME ZONE USING created_at AT TIME ZONE 'UTC',
ALTER COLUMN updated_at TYPE TIMESTAMP WITH TIME ZONE USING updated_at AT TIME ZONE 'UTC',
ALTER COLUMN check_in_at TYPE TIMESTAMP WITH TIME ZONE USING check_in_at AT TIME ZONE 'UTC'"
        ).await?;
        // ReminderTask table: due_at, created_at, updated_at
        db.execute_unprepared(
            "ALTER TABLE reminder_task
ALTER COLUMN due_at TYPE TIMESTAMP WITH TIME ZONE USING due_at AT TIME ZONE 'UTC',
ALTER COLUMN created_at TYPE TIMESTAMP WITH TIME ZONE USING created_at AT TIME ZONE 'UTC',
ALTER COLUMN updated_at TYPE TIMESTAMP WITH TIME ZONE USING updated_at AT TIME ZONE 'UTC'"
        ).await?;
        // ReminderOffset table: created_at
        db.execute_unprepared(
            "ALTER TABLE reminder_offset
ALTER COLUMN created_at TYPE TIMESTAMP WITH TIME ZONE USING created_at AT TIME ZONE 'UTC'"
        ).await?;
        // Notification table: trigger_at, locked_at, sent_at, read_at, created_at, updated_at
        db.execute_unprepared(
            "ALTER TABLE notification
ALTER COLUMN trigger_at TYPE TIMESTAMP WITH TIME ZONE USING trigger_at AT TIME ZONE 'UTC',
ALTER COLUMN locked_at TYPE TIMESTAMP WITH TIME ZONE USING locked_at AT TIME ZONE 'UTC',
ALTER COLUMN sent_at TYPE TIMESTAMP WITH TIME ZONE USING sent_at AT TIME ZONE 'UTC',
ALTER COLUMN read_at TYPE TIMESTAMP WITH TIME ZONE USING read_at AT TIME ZONE 'UTC',
ALTER COLUMN created_at TYPE TIMESTAMP WITH TIME ZONE USING created_at AT TIME ZONE 'UTC',
ALTER COLUMN updated_at TYPE TIMESTAMP WITH TIME ZONE USING updated_at AT TIME ZONE 'UTC'"
        ).await?;
        // DeliveryLog table: created_at
        db.execute_unprepared(
            "ALTER TABLE delivery_log
ALTER COLUMN created_at TYPE TIMESTAMP WITH TIME ZONE USING created_at AT TIME ZONE 'UTC'"
        ).await?;
        Ok(())
    }

    /// Revert every column to naive TIMESTAMP; `AT TIME ZONE 'UTC'` renders
    /// each timestamptz as its UTC wall-clock value before the cast, mirroring
    /// the interpretation used in `up`.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        let db = manager.get_connection();
        // Revert User table
        db.execute_unprepared(
            "ALTER TABLE \"user\"
ALTER COLUMN created_at TYPE TIMESTAMP USING created_at AT TIME ZONE 'UTC',
ALTER COLUMN updated_at TYPE TIMESTAMP USING updated_at AT TIME ZONE 'UTC'"
        ).await?;
        // Revert Invite table
        db.execute_unprepared(
            "ALTER TABLE invite
ALTER COLUMN expires_at TYPE TIMESTAMP USING expires_at AT TIME ZONE 'UTC',
ALTER COLUMN revoked_at TYPE TIMESTAMP USING revoked_at AT TIME ZONE 'UTC',
ALTER COLUMN created_at TYPE TIMESTAMP USING created_at AT TIME ZONE 'UTC'"
        ).await?;
        // Revert RecurrenceRule table
        db.execute_unprepared(
            "ALTER TABLE recurrence_rule
ALTER COLUMN created_at TYPE TIMESTAMP USING created_at AT TIME ZONE 'UTC',
ALTER COLUMN updated_at TYPE TIMESTAMP USING updated_at AT TIME ZONE 'UTC'"
        ).await?;
        // Revert Todo table
        db.execute_unprepared(
            "ALTER TABLE todo
ALTER COLUMN due_at TYPE TIMESTAMP USING due_at AT TIME ZONE 'UTC',
ALTER COLUMN created_at TYPE TIMESTAMP USING created_at AT TIME ZONE 'UTC',
ALTER COLUMN updated_at TYPE TIMESTAMP USING updated_at AT TIME ZONE 'UTC',
ALTER COLUMN check_in_at TYPE TIMESTAMP USING check_in_at AT TIME ZONE 'UTC'"
        ).await?;
        // Revert ReminderTask table
        db.execute_unprepared(
            "ALTER TABLE reminder_task
ALTER COLUMN due_at TYPE TIMESTAMP USING due_at AT TIME ZONE 'UTC',
ALTER COLUMN created_at TYPE TIMESTAMP USING created_at AT TIME ZONE 'UTC',
ALTER COLUMN updated_at TYPE TIMESTAMP USING updated_at AT TIME ZONE 'UTC'"
        ).await?;
        // Revert ReminderOffset table
        db.execute_unprepared(
            "ALTER TABLE reminder_offset
ALTER COLUMN created_at TYPE TIMESTAMP USING created_at AT TIME ZONE 'UTC'"
        ).await?;
        // Revert Notification table
        db.execute_unprepared(
            "ALTER TABLE notification
ALTER COLUMN trigger_at TYPE TIMESTAMP USING trigger_at AT TIME ZONE 'UTC',
ALTER COLUMN locked_at TYPE TIMESTAMP USING locked_at AT TIME ZONE 'UTC',
ALTER COLUMN sent_at TYPE TIMESTAMP USING sent_at AT TIME ZONE 'UTC',
ALTER COLUMN read_at TYPE TIMESTAMP USING read_at AT TIME ZONE 'UTC',
ALTER COLUMN created_at TYPE TIMESTAMP USING created_at AT TIME ZONE 'UTC',
ALTER COLUMN updated_at TYPE TIMESTAMP USING updated_at AT TIME ZONE 'UTC'"
        ).await?;
        // Revert DeliveryLog table
        db.execute_unprepared(
            "ALTER TABLE delivery_log
ALTER COLUMN created_at TYPE TIMESTAMP USING created_at AT TIME ZONE 'UTC'"
        ).await?;
        Ok(())
    }
}

View File

@@ -0,0 +1,86 @@
use sea_orm_migration::prelude::*;
#[derive(DeriveMigrationName)]
pub struct Migration;

#[async_trait::async_trait]
impl MigrationTrait for Migration {
    /// Record which invite a user registered with: add a nullable
    /// `user.invite_id` column, an FK to `invite` (SET NULL on delete so
    /// removing an invite keeps the account), and a lookup index.
    async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Add invite_id column to user table
        manager
            .alter_table(
                Table::alter()
                    .table(User::Table)
                    .add_column(ColumnDef::new(User::InviteId).uuid().null())
                    .to_owned(),
            )
            .await?;
        // Add foreign key constraint
        manager
            .create_foreign_key(
                ForeignKey::create()
                    .name("fk_user_invite_id")
                    .from(User::Table, User::InviteId)
                    .to(Invite::Table, Invite::Id)
                    .on_delete(ForeignKeyAction::SetNull)
                    .on_update(ForeignKeyAction::Cascade)
                    .to_owned(),
            )
            .await?;
        // Index to find all users who registered via a given invite.
        manager
            .create_index(
                Index::create()
                    .name("idx_user_invite_id")
                    .table(User::Table)
                    .col(User::InviteId)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }

    /// Revert in reverse order: index, foreign key, then the column itself.
    async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
        // Drop index
        manager
            .drop_index(Index::drop().name("idx_user_invite_id").to_owned())
            .await?;
        // Drop foreign key
        manager
            .drop_foreign_key(
                ForeignKey::drop()
                    .table(User::Table)
                    .name("fk_user_invite_id")
                    .to_owned(),
            )
            .await?;
        // Drop column
        manager
            .alter_table(
                Table::alter()
                    .table(User::Table)
                    .drop_column(User::InviteId)
                    .to_owned(),
            )
            .await?;
        Ok(())
    }
}

// Idents: `user` gains InviteId; `invite` is the FK target.
#[derive(DeriveIden)]
enum User {
    Table,
    InviteId,
}

#[derive(DeriveIden)]
enum Invite {
    Table,
    Id,
}

View File

@@ -0,0 +1,6 @@
use sea_orm_migration::prelude::*;
/// Entry point for the SeaORM migration CLI (`up`, `down`, `status`, ...).
#[tokio::main]
async fn main() {
    cli::run_cli(migration::Migrator).await;
}

View File

@@ -0,0 +1,161 @@
use actix_web::{HttpResponse, Responder, Scope, post, web};
use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, QueryFilter, Set, TransactionTrait};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::app_data::AppData;
use crate::entity::{invite, user};
use crate::error::ApiError;
use crate::middleware::auth::create_token;
/// Payload for `POST /api/auth/register` (snake_case JSON keys).
#[derive(Debug, Deserialize)]
pub struct RegisterRequest {
    pub username: String,
    pub password: String,
    // Registration is invite-only; the code is validated transactionally.
    pub invite_code: String,
}

/// Payload for `POST /api/auth/login`.
#[derive(Debug, Deserialize)]
pub struct LoginRequest {
    pub username: String,
    pub password: String,
}

/// Successful auth response: a JWT plus basic profile info.
#[derive(Debug, Serialize)]
pub struct AuthResponse {
    pub token: String,
    pub user: UserInfo,
}

/// Minimal public user representation embedded in auth responses.
#[derive(Debug, Serialize)]
pub struct UserInfo {
    pub id: Uuid,
    pub username: String,
}
/// Register a new account with an invite code.
///
/// All work happens inside one DB transaction: validate the invite, reject
/// duplicate usernames, insert the user, and atomically consume one invite
/// use. Returns a JWT plus basic profile info on success.
///
/// Error messages are deliberately generic so the endpoint leaks as little
/// as possible about which part of the payload was wrong.
#[post("/register")]
async fn register(
    app_data: web::Data<AppData>,
    body: web::Json<RegisterRequest>,
) -> Result<impl Responder, ApiError> {
    if body.username.len() < 3 {
        return Err(ApiError::BadRequest("Invalid payload".to_string()));
    }
    if body.password.len() < 6 {
        return Err(ApiError::BadRequest("Invalid payload".to_string()));
    }
    if body.invite_code.len() < 4 {
        return Err(ApiError::BadRequest("Invalid payload".to_string()));
    }
    let now = chrono::Utc::now().fixed_offset();
    let result = app_data
        .db
        .transaction::<_, (Uuid, String), ApiError>(|txn| {
            let username = body.username.clone();
            let password = body.password.clone();
            let invite_code = body.invite_code.clone();
            Box::pin(async move {
                // Find a live (not revoked, not expired) invite by code.
                let inv = invite::Entity::find()
                    .filter(invite::Column::Code.eq(&invite_code))
                    .filter(invite::Column::RevokedAt.is_null())
                    .filter(invite::Column::ExpiresAt.gt(now))
                    .one(txn)
                    .await?
                    .ok_or_else(|| ApiError::BadRequest("Invalid invite".to_string()))?;
                // Fast-path rejection; the authoritative check is the guarded
                // UPDATE below, which is race-free under concurrency.
                if inv.used_count >= inv.max_uses {
                    return Err(ApiError::BadRequest("Invalid invite".to_string()));
                }
                // Reject duplicate usernames.
                let existing = user::Entity::find()
                    .filter(user::Column::Username.eq(&username))
                    .one(txn)
                    .await?;
                if existing.is_some() {
                    return Err(ApiError::Conflict("Username taken".to_string()));
                }
                // Create the user with default settings.
                let password_hash = bcrypt::hash(&password, 10)?;
                let user_id = Uuid::new_v4();
                let new_user = user::ActiveModel {
                    id: Set(user_id),
                    username: Set(username.clone()),
                    password_hash: Set(password_hash),
                    avatar: Set(None),
                    timezone: Set("Asia/Shanghai".to_string()),
                    bark_url: Set(None),
                    inapp_enabled: Set(true),
                    bark_enabled: Set(false),
                    invite_id: Set(Some(inv.id)),
                    created_at: Set(now),
                    updated_at: Set(now),
                };
                new_user.insert(txn).await?;
                // Atomically consume one invite use. The `used_count < max_uses`
                // guard in the WHERE clause prevents two concurrent
                // registrations (both passing the read-based check above under
                // READ COMMITTED) from overshooting the usage limit.
                let consumed = invite::Entity::update_many()
                    .col_expr(
                        invite::Column::UsedCount,
                        sea_orm::sea_query::Expr::col(invite::Column::UsedCount).add(1),
                    )
                    .filter(invite::Column::Id.eq(inv.id))
                    .filter(invite::Column::UsedCount.lt(inv.max_uses))
                    .exec(txn)
                    .await?;
                if consumed.rows_affected == 0 {
                    // Lost the race: another registration exhausted the invite.
                    return Err(ApiError::BadRequest("Invalid invite".to_string()));
                }
                Ok((user_id, username))
            })
        })
        .await
        .map_err(|e| match e {
            sea_orm::TransactionError::Connection(e) => ApiError::Internal(e.to_string()),
            sea_orm::TransactionError::Transaction(e) => e,
        })?;
    // Issue a JWT for the freshly created account.
    let token = create_token(result.0, &app_data.jwt_secret)?;
    Ok(HttpResponse::Ok().json(AuthResponse {
        token,
        user: UserInfo {
            id: result.0,
            username: result.1,
        },
    }))
}
/// Authenticate an existing user and return a JWT plus basic profile info.
///
/// Responds 401 with one generic message for both unknown usernames and
/// wrong passwords so the endpoint does not reveal which accounts exist.
#[post("/login")]
async fn login(
    app_data: web::Data<AppData>,
    body: web::Json<LoginRequest>,
) -> Result<impl Responder, ApiError> {
    let too_short = body.username.len() < 3 || body.password.len() < 6;
    if too_short {
        return Err(ApiError::BadRequest("Invalid payload".to_string()));
    }
    let account = user::Entity::find()
        .filter(user::Column::Username.eq(&body.username))
        .one(&app_data.db)
        .await?
        .ok_or_else(|| ApiError::Unauthorized("Invalid credentials".to_string()))?;
    // NOTE(review): bcrypt verification only runs when the username exists,
    // so response timing can differ for unknown users — confirm acceptable.
    if !bcrypt::verify(&body.password, &account.password_hash)? {
        return Err(ApiError::Unauthorized("Invalid credentials".to_string()));
    }
    let token = create_token(account.id, &app_data.jwt_secret)?;
    let user = UserInfo {
        id: account.id,
        username: account.username,
    };
    Ok(HttpResponse::Ok().json(AuthResponse { token, user }))
}
/// Mount the public (unauthenticated) auth endpoints under `/api/auth`.
pub fn routes() -> Scope {
    web::scope("/api/auth").service(register).service(login)
}

View File

@@ -0,0 +1,10 @@
use actix_web::{HttpResponse, Responder, Scope, get, web};
/// Liveness probe; always responds 200 with the body "OK".
#[get("")]
async fn health() -> impl Responder {
    HttpResponse::Ok().body("OK")
}

/// Mount the unauthenticated health check under `/health`.
pub fn routes() -> Scope {
    web::scope("/health").service(health)
}

View File

@@ -0,0 +1,167 @@
use actix_web::{HttpResponse, Responder, Scope, get, post, web};
use rand::{Rng, distr::Alphanumeric};
use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, QueryFilter, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::app_data::AppData;
use crate::entity::{invite, user};
use crate::error::ApiError;
use crate::middleware::auth::AuthUser;
/// Payload for `POST /api/invites`; both fields optional with server defaults.
#[derive(Debug, Deserialize)]
pub struct CreateInviteRequest {
    // Clamped server-side to 1..=20; defaults to 5.
    pub max_uses: Option<i32>,
    // Clamped server-side to 1..=30; defaults to 7.
    pub expires_in_days: Option<i32>,
}

/// Generic `{ "ok": true }` acknowledgement body.
#[derive(Debug, Serialize)]
pub struct OkResponse {
    pub ok: bool,
}

/// A user who registered using an invite (subset of the user row).
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct RegisteredUserInfo {
    pub id: Uuid,
    pub username: String,
    pub avatar: Option<String>,
    pub created_at: chrono::DateTime<chrono::FixedOffset>,
}

/// Invite detail response including the users who redeemed it.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct InviteWithUsers {
    pub id: Uuid,
    pub code: String,
    pub creator_id: Uuid,
    pub max_uses: i32,
    pub used_count: i32,
    pub expires_at: chrono::DateTime<chrono::FixedOffset>,
    pub revoked_at: Option<chrono::DateTime<chrono::FixedOffset>>,
    pub created_at: chrono::DateTime<chrono::FixedOffset>,
    pub registered_users: Vec<RegisteredUserInfo>,
}
/// Create an invite code owned by the authenticated user.
///
/// `max_uses` is clamped to 1..=20 (default 5) and `expires_in_days` to
/// 1..=30 (default 7). Returns the complete new invite row as JSON.
#[post("")]
async fn create_invite(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    body: web::Json<CreateInviteRequest>,
) -> Result<impl Responder, ApiError> {
    let uses_allowed = body.max_uses.unwrap_or(5).clamp(1, 20);
    let days_valid = i64::from(body.expires_in_days.unwrap_or(7).clamp(1, 30));
    let valid_until = chrono::Utc::now().fixed_offset() + chrono::Duration::days(days_valid);
    let row = invite::ActiveModel {
        id: Set(Uuid::new_v4()),
        code: Set(generate_invite_code()),
        creator_id: Set(auth.user_id),
        max_uses: Set(uses_allowed),
        used_count: Set(0),
        expires_at: Set(valid_until),
        revoked_at: Set(None),
        created_at: Set(chrono::Utc::now().fixed_offset()),
    };
    let created = row.insert(&app_data.db).await?;
    Ok(HttpResponse::Ok().json(created))
}
/// List every invite created by the authenticated user, newest first.
#[get("")]
async fn list_invites(
    app_data: web::Data<AppData>,
    auth: AuthUser,
) -> Result<impl Responder, ApiError> {
    let mine = invite::Entity::find()
        .filter(invite::Column::CreatorId.eq(auth.user_id))
        .order_by_desc(invite::Column::CreatedAt);
    let rows = mine.all(&app_data.db).await?;
    Ok(HttpResponse::Ok().json(rows))
}
/// Fetch one invite owned by the caller, including the users who registered
/// with it (ordered by registration time).
#[get("/{id}")]
async fn get_invite(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    path: web::Path<Uuid>,
) -> Result<impl Responder, ApiError> {
    let invite_id = path.into_inner();
    // Scope the lookup to the caller so users cannot read others' invites.
    let inv = invite::Entity::find_by_id(invite_id)
        .filter(invite::Column::CreatorId.eq(auth.user_id))
        .one(&app_data.db)
        .await?
        .ok_or_else(|| ApiError::NotFound("Invite not found".to_string()))?;
    // Users whose registration consumed this invite.
    let members = user::Entity::find()
        .filter(user::Column::InviteId.eq(inv.id))
        .order_by_asc(user::Column::CreatedAt)
        .all(&app_data.db)
        .await?;
    let registered_users = members
        .into_iter()
        .map(|u| RegisteredUserInfo {
            id: u.id,
            username: u.username,
            avatar: u.avatar,
            created_at: u.created_at,
        })
        .collect::<Vec<_>>();
    let payload = InviteWithUsers {
        id: inv.id,
        code: inv.code,
        creator_id: inv.creator_id,
        max_uses: inv.max_uses,
        used_count: inv.used_count,
        expires_at: inv.expires_at,
        revoked_at: inv.revoked_at,
        created_at: inv.created_at,
        registered_users,
    };
    Ok(HttpResponse::Ok().json(payload))
}
/// Mark one of the caller's invites as revoked. Idempotent in effect:
/// revoking again simply refreshes the `revoked_at` timestamp.
#[post("/{id}/revoke")]
async fn revoke_invite(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    path: web::Path<Uuid>,
) -> Result<impl Responder, ApiError> {
    let invite_id = path.into_inner();
    let found = invite::Entity::find_by_id(invite_id)
        .filter(invite::Column::CreatorId.eq(auth.user_id))
        .one(&app_data.db)
        .await?;
    let Some(inv) = found else {
        return Err(ApiError::NotFound("Invite not found".to_string()));
    };
    let mut revocation: invite::ActiveModel = inv.into();
    revocation.revoked_at = Set(Some(chrono::Utc::now().fixed_offset()));
    revocation.update(&app_data.db).await?;
    Ok(HttpResponse::Ok().json(OkResponse { ok: true }))
}
/// Build a human-readable invite code: "INV-" plus six random alphanumerics.
fn generate_invite_code() -> String {
    let suffix: String = rand::rng()
        .sample_iter(Alphanumeric)
        .take(6)
        .map(char::from)
        .collect();
    format!("INV-{suffix}")
}
/// Mount the authenticated invite-management endpoints under `/api/invites`.
pub fn routes() -> Scope {
    web::scope("/api/invites")
        .service(create_invite)
        .service(list_invites)
        .service(get_invite)
        .service(revoke_invite)
}

216
backend_rust/src/api/me.rs Normal file
View File

@@ -0,0 +1,216 @@
use actix_multipart::Multipart;
use actix_web::{HttpResponse, Responder, Scope, get, post, put, web};
use futures_util::StreamExt;
use sea_orm::{ActiveModelTrait, EntityTrait, Set};
use serde::{Deserialize, Serialize};
use std::io::Write;
use uuid::Uuid;
use crate::app_data::AppData;
use crate::entity::user;
use crate::error::ApiError;
use crate::middleware::auth::AuthUser;
/// Profile and notification settings returned by `/api/me` endpoints
/// (camelCase JSON).
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct MeResponse {
    pub id: Uuid,
    pub username: String,
    pub avatar: Option<String>,
    pub timezone: String,
    pub bark_url: Option<String>,
    pub inapp_enabled: bool,
    pub bark_enabled: bool,
}

/// Partial-update payload for `PUT /api/me/settings`. Absent fields are left
/// unchanged; note optional fields cannot be cleared back to null this way.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct UpdateSettingsRequest {
    pub avatar: Option<String>,
    pub timezone: Option<String>,
    pub bark_url: Option<String>,
    pub inapp_enabled: Option<bool>,
    pub bark_enabled: Option<bool>,
}
/// Return the authenticated user's profile and notification settings.
#[get("")]
async fn get_me(
    app_data: web::Data<AppData>,
    auth: AuthUser,
) -> Result<impl Responder, ApiError> {
    let account = user::Entity::find_by_id(auth.user_id)
        .one(&app_data.db)
        .await?
        .ok_or_else(|| ApiError::NotFound("User not found".to_string()))?;
    let payload = MeResponse {
        id: account.id,
        username: account.username,
        avatar: account.avatar,
        timezone: account.timezone,
        bark_url: account.bark_url,
        inapp_enabled: account.inapp_enabled,
        bark_enabled: account.bark_enabled,
    };
    Ok(HttpResponse::Ok().json(payload))
}
/// Partially update the caller's settings: only fields present in the JSON
/// body are written. Absent means "keep current value", so `avatar` and
/// `barkUrl` can be set but not cleared to null through this endpoint.
#[put("/settings")]
async fn update_settings(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    body: web::Json<UpdateSettingsRequest>,
) -> Result<impl Responder, ApiError> {
    let current = user::Entity::find_by_id(auth.user_id)
        .one(&app_data.db)
        .await?
        .ok_or_else(|| ApiError::NotFound("User not found".to_string()))?;
    let mut changes: user::ActiveModel = current.into();
    if let Some(v) = body.avatar.clone() {
        changes.avatar = Set(Some(v));
    }
    if let Some(v) = body.timezone.clone() {
        changes.timezone = Set(v);
    }
    if let Some(v) = body.bark_url.clone() {
        changes.bark_url = Set(Some(v));
    }
    if let Some(v) = body.inapp_enabled {
        changes.inapp_enabled = Set(v);
    }
    if let Some(v) = body.bark_enabled {
        changes.bark_enabled = Set(v);
    }
    changes.updated_at = Set(chrono::Utc::now().fixed_offset());
    let saved = changes.update(&app_data.db).await?;
    Ok(HttpResponse::Ok().json(MeResponse {
        id: saved.id,
        username: saved.username,
        avatar: saved.avatar,
        timezone: saved.timezone,
        bark_url: saved.bark_url,
        inapp_enabled: saved.inapp_enabled,
        bark_enabled: saved.bark_enabled,
    }))
}
/// Response for `POST /api/me/avatar`: the public URL of the stored file.
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct UploadAvatarResponse {
    pub avatar_url: String,
}
/// Upload the authenticated user's avatar via multipart form data
/// (field name: `avatar`).
/// Supported formats: jpg, jpeg, png, gif, webp. Max file size: 5MB.
/// Replaces the previous avatar file on disk on a best-effort basis.
#[post("/avatar")]
async fn upload_avatar(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    mut payload: Multipart,
) -> Result<impl Responder, ApiError> {
    const MAX_FILE_SIZE: usize = 5 * 1024 * 1024; // 5MB
    const ALLOWED_EXTENSIONS: &[&str] = &["jpg", "jpeg", "png", "gif", "webp"];
    let mut file_data: Option<(Vec<u8>, String)> = None;
    // Parse the multipart form, taking only the first `avatar` field.
    while let Some(item) = payload.next().await {
        let mut field = item.map_err(|e| ApiError::BadRequest(format!("Multipart error: {}", e)))?;
        let content_disposition = field
            .content_disposition()
            .ok_or_else(|| ApiError::BadRequest("Missing content disposition".to_string()))?;
        let field_name = content_disposition.get_name().unwrap_or("");
        if field_name != "avatar" {
            continue;
        }
        // Derive the lowercase extension from the client-supplied filename;
        // the allowlist check below also keeps path characters out of it.
        let filename = content_disposition
            .get_filename()
            .ok_or_else(|| ApiError::BadRequest("Missing filename".to_string()))?;
        let extension: String = filename
            .rsplit('.')
            .next()
            .map(|s| s.to_lowercase())
            .ok_or_else(|| ApiError::BadRequest("Invalid filename".to_string()))?;
        if !ALLOWED_EXTENSIONS.contains(&extension.as_str()) {
            return Err(ApiError::BadRequest(format!(
                "Unsupported file format. Allowed: {}",
                ALLOWED_EXTENSIONS.join(", ")
            )));
        }
        // Buffer the field into memory, enforcing the size cap as we go.
        let mut data = Vec::new();
        while let Some(chunk) = field.next().await {
            let chunk =
                chunk.map_err(|e| ApiError::BadRequest(format!("Error reading chunk: {}", e)))?;
            if data.len() + chunk.len() > MAX_FILE_SIZE {
                return Err(ApiError::BadRequest(format!(
                    "File too large. Max size: {}MB",
                    MAX_FILE_SIZE / 1024 / 1024
                )));
            }
            data.extend_from_slice(&chunk);
        }
        file_data = Some((data, extension));
        break;
    }
    let (data, extension) = file_data.ok_or_else(|| {
        ApiError::BadRequest("No avatar file provided. Use field name 'avatar'".to_string())
    })?;
    // Store under a fresh UUID so uploads never collide or overwrite.
    let file_id = Uuid::new_v4();
    let filename = format!("{}.{}", file_id, extension);
    let avatars_dir = app_data.upload_dir.join("avatars");
    // Ensure the target directory exists — on a fresh deployment it may not,
    // and File::create would otherwise fail with a 500.
    std::fs::create_dir_all(&avatars_dir)
        .map_err(|e| ApiError::Internal(format!("Failed to create upload dir: {}", e)))?;
    let file_path = avatars_dir.join(&filename);
    // NOTE(review): blocking fs I/O on the async executor; consider
    // `web::block` if uploads become frequent.
    let mut file = std::fs::File::create(&file_path)
        .map_err(|e| ApiError::Internal(format!("Failed to create file: {}", e)))?;
    file.write_all(&data)
        .map_err(|e| ApiError::Internal(format!("Failed to write file: {}", e)))?;
    // Public URL served from the uploads static mount.
    let avatar_url = format!("{}/uploads/avatars/{}", app_data.base_url, filename);
    // Update the user's avatar reference.
    let user = user::Entity::find_by_id(auth.user_id)
        .one(&app_data.db)
        .await?
        .ok_or_else(|| ApiError::NotFound("User not found".to_string()))?;
    // Best-effort cleanup of the previous avatar file (may already be gone).
    if let Some(old_avatar) = &user.avatar {
        if let Some(old_filename) = old_avatar.rsplit('/').next() {
            let old_path = app_data.upload_dir.join("avatars").join(old_filename);
            let _ = std::fs::remove_file(old_path);
        }
    }
    let mut active: user::ActiveModel = user.into();
    active.avatar = Set(Some(avatar_url.clone()));
    active.updated_at = Set(chrono::Utc::now().fixed_offset());
    active.update(&app_data.db).await?;
    Ok(HttpResponse::Ok().json(UploadAvatarResponse { avatar_url }))
}
/// Mount the authenticated profile endpoints under `/api/me`.
pub fn routes() -> Scope {
    web::scope("/api/me")
        .service(get_me)
        .service(update_settings)
        .service(upload_avatar)
}

View File

@@ -0,0 +1,8 @@
//! HTTP API modules; each submodule exposes a `routes() -> Scope`
//! constructor mounted by the server setup.
pub mod auth;
pub mod health;
pub mod invites;
pub mod me;
pub mod notifications;
pub mod reminder_tasks;
pub mod todos;
pub mod users;

View File

@@ -0,0 +1,95 @@
use actix_web::{HttpResponse, Responder, Scope, get, post, web};
use sea_orm::{ActiveModelTrait, ColumnTrait, EntityTrait, QueryFilter, QueryOrder, Set};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::app_data::AppData;
use crate::entity::notification;
use crate::error::ApiError;
use crate::middleware::auth::AuthUser;
/// Query string for `GET /api/notifications`; `?status=unread` filters to
/// unread rows, any other (or absent) value returns all.
#[derive(Debug, Deserialize)]
pub struct ListQuery {
    pub status: Option<String>,
}

/// Generic `{ "ok": true }` acknowledgement body.
#[derive(Debug, Serialize)]
pub struct OkResponse {
    pub ok: bool,
}
/// List the caller's notifications, most recent trigger time first.
/// `?status=unread` restricts to unread rows; anything else means all.
#[get("")]
async fn list_notifications(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    query: web::Query<ListQuery>,
) -> Result<impl Responder, ApiError> {
    let only_unread = matches!(query.status.as_deref(), Some("unread"));
    let mut finder =
        notification::Entity::find().filter(notification::Column::RecipientId.eq(auth.user_id));
    if only_unread {
        finder = finder.filter(notification::Column::ReadAt.is_null());
    }
    let rows = finder
        .order_by_desc(notification::Column::TriggerAt)
        .all(&app_data.db)
        .await?;
    Ok(HttpResponse::Ok().json(rows))
}
/// Mark one of the caller's notifications as read. Re-marking an already
/// read notification simply refreshes its `read_at` timestamp.
#[post("/{id}/read")]
async fn mark_read(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    path: web::Path<Uuid>,
) -> Result<impl Responder, ApiError> {
    let notif_id = path.into_inner();
    // Scoped to the recipient so users cannot touch others' notifications.
    let row = notification::Entity::find_by_id(notif_id)
        .filter(notification::Column::RecipientId.eq(auth.user_id))
        .one(&app_data.db)
        .await?
        .ok_or_else(|| ApiError::NotFound("Not found".to_string()))?;
    let stamp = chrono::Utc::now().fixed_offset();
    let mut changes: notification::ActiveModel = row.into();
    changes.read_at = Set(Some(stamp));
    changes.updated_at = Set(stamp);
    changes.update(&app_data.db).await?;
    Ok(HttpResponse::Ok().json(OkResponse { ok: true }))
}
/// POST /api/notifications/read-all — mark every unread notification of the
/// caller as read in a single bulk UPDATE.
#[post("/read-all")]
async fn mark_all_read(
    app_data: web::Data<AppData>,
    auth: AuthUser,
) -> Result<impl Responder, ApiError> {
    use sea_orm::sea_query::Expr;
    let stamp = chrono::Utc::now().fixed_offset();
    notification::Entity::update_many()
        .filter(notification::Column::RecipientId.eq(auth.user_id))
        .filter(notification::Column::ReadAt.is_null())
        .col_expr(notification::Column::ReadAt, Expr::value(stamp))
        .col_expr(notification::Column::UpdatedAt, Expr::value(stamp))
        .exec(&app_data.db)
        .await?;
    Ok(HttpResponse::Ok().json(OkResponse { ok: true }))
}
/// Mounts the notification endpoints under `/api/notifications`.
/// Registration order mirrors the original module.
pub fn routes() -> Scope {
    let scope = web::scope("/api/notifications");
    scope
        .service(list_notifications)
        .service(mark_all_read)
        .service(mark_read)
}

View File

@@ -0,0 +1,451 @@
use actix_web::{HttpResponse, Responder, Scope, delete, get, post, put, web};
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, QueryFilter, QueryOrder, Set, TransactionTrait,
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::app_data::AppData;
use crate::entity::sea_orm_active_enums::{RecurrenceType, TargetType};
use crate::entity::{recurrence_rule, reminder_offset, reminder_task, reminder_task_recipient};
use crate::error::ApiError;
use crate::middleware::auth::AuthUser;
use crate::timer::WorkerCommand;
/// Recurrence specification accepted when creating/updating a task.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RecurrenceRuleInput {
    /// One of "hourly", "daily", "weekly", "monthly", "yearly";
    /// validated by `parse_recurrence_type`.
    pub r#type: String,
    /// Step between occurrences; defaults to 1 when omitted.
    pub interval: Option<i32>,
    /// Weekday selector for weekly rules — assumes a 0-6 encoding;
    /// TODO confirm against the worker.
    pub by_weekday: Option<i32>,
    /// Day-of-month selector for monthly rules.
    pub by_monthday: Option<i32>,
    /// Timezone name; defaults to "Asia/Shanghai" when omitted.
    pub timezone: Option<String>,
}
/// One reminder offset relative to the task's due time, with optional
/// per-offset Bark push customization.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct OffsetInput {
    /// Minutes relative to the due time; sign convention is not visible
    /// here — presumably negative/zero means "before due", confirm in the
    /// notification worker.
    pub offset_minutes: i32,
    /// Deliver in-app; defaults to true when omitted.
    pub channel_inapp: Option<bool>,
    /// Deliver via Bark push; defaults to false when omitted.
    pub channel_bark: Option<bool>,
    /// Custom title for Bark push notification
    pub bark_title: Option<String>,
    /// Custom subtitle for Bark push notification
    pub bark_subtitle: Option<String>,
    /// Markdown content for Bark push (overrides body if set)
    pub bark_body_markdown: Option<String>,
    /// Alert level: active, timeSensitive, passive, critical
    pub bark_level: Option<String>,
    /// Custom icon URL for Bark push
    pub bark_icon: Option<String>,
}
/// Request body shared by task creation (POST) and replacement (PUT).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct ReminderTaskInput {
    /// Task title; an empty title is rejected with 400.
    pub title: String,
    pub description: Option<String>,
    /// Due time in UTC.
    pub due_at: chrono::DateTime<chrono::Utc>,
    /// Users who should receive the reminder.
    pub recipient_ids: Vec<Uuid>,
    pub recurrence_rule: Option<RecurrenceRuleInput>,
    pub offsets: Option<Vec<OffsetInput>>,
}
/// Minimal `{ "ok": true }` acknowledgement body.
#[derive(Debug, Serialize)]
pub struct OkResponse {
    pub ok: bool,
}
/// GET /api/reminder-tasks — all tasks created by the caller, latest due
/// date first, each serialized together with its recurrence rule,
/// recipients and reminder offsets.
#[get("")]
async fn list_tasks(
    app_data: web::Data<AppData>,
    auth: AuthUser,
) -> Result<impl Responder, ApiError> {
    let tasks_with_rules = reminder_task::Entity::find()
        .filter(reminder_task::Column::CreatorId.eq(auth.user_id))
        .order_by_desc(reminder_task::Column::DueAt)
        .find_also_related(recurrence_rule::Entity)
        .all(&app_data.db)
        .await?;
    // Bulk-load the related rows for every task in two queries.
    let ids: Vec<Uuid> = tasks_with_rules.iter().map(|(task, _)| task.id).collect();
    let all_recipients = reminder_task_recipient::Entity::find()
        .filter(reminder_task_recipient::Column::TaskId.is_in(ids.clone()))
        .all(&app_data.db)
        .await?;
    let all_offsets = reminder_offset::Entity::find()
        .filter(reminder_offset::Column::TargetType.eq(TargetType::ReminderTask))
        .filter(reminder_offset::Column::TargetId.is_in(ids))
        .all(&app_data.db)
        .await?;
    let mut payload = Vec::with_capacity(tasks_with_rules.len());
    for (task, rule) in tasks_with_rules {
        let recipients: Vec<_> = all_recipients
            .iter()
            .filter(|r| r.task_id == task.id)
            .cloned()
            .collect();
        let offsets: Vec<_> = all_offsets
            .iter()
            .filter(|o| o.target_id == task.id)
            .cloned()
            .collect();
        payload.push(serde_json::json!({
            "id": task.id,
            "creatorId": task.creator_id,
            "title": task.title,
            "description": task.description,
            "dueAt": task.due_at,
            "recurrenceRuleId": task.recurrence_rule_id,
            "createdAt": task.created_at,
            "updatedAt": task.updated_at,
            "recurrenceRule": rule,
            "recipients": recipients,
            "offsets": offsets,
        }));
    }
    Ok(HttpResponse::Ok().json(payload))
}
/// POST /api/reminder-tasks — create a task plus its optional recurrence
/// rule, its recipient links and its reminder offsets in one transaction,
/// then ask the worker to generate notifications for it.
///
/// Returns 400 for an empty title, otherwise the created task row.
#[post("")]
async fn create_task(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    body: web::Json<ReminderTaskInput>,
) -> Result<impl Responder, ApiError> {
    if body.title.is_empty() {
        return Err(ApiError::BadRequest("Invalid payload".to_string()));
    }
    // Single timestamp reused for every row created in this request.
    let now = chrono::Utc::now().fixed_offset();
    let body = body.into_inner();
    let user_id = auth.user_id;
    let result = app_data
        .db
        .transaction::<_, reminder_task::Model, ApiError>(|txn| {
            Box::pin(async move {
                // Create the recurrence rule first (if provided) so its id
                // can be stored on the task row.
                let rule_id = if let Some(rule_input) = body.recurrence_rule {
                    let rule = recurrence_rule::ActiveModel {
                        id: Set(Uuid::new_v4()),
                        r#type: Set(parse_recurrence_type(&rule_input.r#type)?),
                        interval: Set(rule_input.interval.unwrap_or(1)),
                        by_weekday: Set(rule_input.by_weekday),
                        by_monthday: Set(rule_input.by_monthday),
                        timezone: Set(rule_input.timezone.unwrap_or("Asia/Shanghai".to_string())),
                        created_at: Set(now),
                        updated_at: Set(now),
                    };
                    let created = rule.insert(txn).await?;
                    Some(created.id)
                } else {
                    None
                };
                // Create task
                let new_task = reminder_task::ActiveModel {
                    id: Set(Uuid::new_v4()),
                    creator_id: Set(user_id),
                    title: Set(body.title),
                    description: Set(body.description),
                    due_at: Set(body.due_at.fixed_offset()),
                    recurrence_rule_id: Set(rule_id),
                    created_at: Set(now),
                    updated_at: Set(now),
                };
                let created_task = new_task.insert(txn).await?;
                // Create one recipient link row per target user.
                for recipient_id in body.recipient_ids {
                    let new_recipient = reminder_task_recipient::ActiveModel {
                        task_id: Set(created_task.id),
                        user_id: Set(recipient_id),
                    };
                    new_recipient.insert(txn).await?;
                }
                // Create the reminder offsets, including per-offset Bark
                // push customization.
                if let Some(offsets) = body.offsets {
                    for offset in offsets {
                        let new_offset = reminder_offset::ActiveModel {
                            id: Set(Uuid::new_v4()),
                            target_type: Set(TargetType::ReminderTask),
                            target_id: Set(created_task.id),
                            offset_minutes: Set(offset.offset_minutes),
                            channel_inapp: Set(offset.channel_inapp.unwrap_or(true)),
                            channel_bark: Set(offset.channel_bark.unwrap_or(false)),
                            created_at: Set(now),
                            bark_title: Set(offset.bark_title),
                            bark_subtitle: Set(offset.bark_subtitle),
                            bark_body_markdown: Set(offset.bark_body_markdown),
                            bark_level: Set(offset.bark_level),
                            bark_icon: Set(offset.bark_icon),
                        };
                        new_offset.insert(txn).await?;
                    }
                }
                Ok(created_task)
            })
        })
        .await
        .map_err(|e| match e {
            sea_orm::TransactionError::Connection(e) => ApiError::Internal(e.to_string()),
            sea_orm::TransactionError::Transaction(e) => e,
        })?;
    // Ask the background worker to generate notifications for the new task.
    // Best-effort: a send failure is deliberately ignored here.
    let _ = app_data
        .send_worker_command(WorkerCommand::GenerateNotifications {
            target_type: TargetType::ReminderTask,
            target_id: result.id,
        })
        .await;
    Ok(HttpResponse::Ok().json(result))
}
/// GET /api/reminder-tasks/{id} — one task owned by the caller, together
/// with its recurrence rule, recipients and offsets; 404 when it does not
/// exist or belongs to someone else.
#[get("/{id}")]
async fn get_task(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    path: web::Path<Uuid>,
) -> Result<impl Responder, ApiError> {
    let task_id = path.into_inner();
    let (task, rule) = reminder_task::Entity::find_by_id(task_id)
        .filter(reminder_task::Column::CreatorId.eq(auth.user_id))
        .find_also_related(recurrence_rule::Entity)
        .one(&app_data.db)
        .await?
        .ok_or_else(|| ApiError::NotFound("Not found".to_string()))?;
    let recipients = reminder_task_recipient::Entity::find()
        .filter(reminder_task_recipient::Column::TaskId.eq(task.id))
        .all(&app_data.db)
        .await?;
    let offsets = reminder_offset::Entity::find()
        .filter(reminder_offset::Column::TargetType.eq(TargetType::ReminderTask))
        .filter(reminder_offset::Column::TargetId.eq(task.id))
        .all(&app_data.db)
        .await?;
    Ok(HttpResponse::Ok().json(serde_json::json!({
        "id": task.id,
        "creatorId": task.creator_id,
        "title": task.title,
        "description": task.description,
        "dueAt": task.due_at,
        "recurrenceRuleId": task.recurrence_rule_id,
        "createdAt": task.created_at,
        "updatedAt": task.updated_at,
        "recurrenceRule": rule,
        "recipients": recipients,
        "offsets": offsets,
    })))
}
/// PUT /api/reminder-tasks/{id} — replace a task's fields, recurrence rule,
/// recipients and offsets atomically, then ask the worker to regenerate its
/// notifications.
///
/// Returns 400 for an empty title, 404 when the task is missing or owned
/// by another user.
#[put("/{id}")]
async fn update_task(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    path: web::Path<Uuid>,
    body: web::Json<ReminderTaskInput>,
) -> Result<impl Responder, ApiError> {
    let id = path.into_inner();
    if body.title.is_empty() {
        return Err(ApiError::BadRequest("Invalid payload".to_string()));
    }
    let now = chrono::Utc::now().fixed_offset();
    let body = body.into_inner();
    let user_id = auth.user_id;
    let result = app_data
        .db
        .transaction::<_, reminder_task::Model, ApiError>(|txn| {
            Box::pin(async move {
                // The creator filter doubles as the ownership check.
                let existing = reminder_task::Entity::find_by_id(id)
                    .filter(reminder_task::Column::CreatorId.eq(user_id))
                    .one(txn)
                    .await?
                    .ok_or_else(|| ApiError::NotFound("Not found".to_string()))?;
                // Handle recurrence rule: update in place, create anew, or
                // drop it, depending on what the payload carries.
                let mut rule_id = existing.recurrence_rule_id;
                if let Some(rule_input) = body.recurrence_rule {
                    if let Some(existing_rule_id) = rule_id {
                        // Update existing rule
                        let mut rule: recurrence_rule::ActiveModel =
                            recurrence_rule::Entity::find_by_id(existing_rule_id)
                                .one(txn)
                                .await?
                                .ok_or_else(|| ApiError::Internal("Rule not found".to_string()))?
                                .into();
                        rule.r#type = Set(parse_recurrence_type(&rule_input.r#type)?);
                        rule.interval = Set(rule_input.interval.unwrap_or(1));
                        rule.by_weekday = Set(rule_input.by_weekday);
                        rule.by_monthday = Set(rule_input.by_monthday);
                        rule.timezone =
                            Set(rule_input.timezone.unwrap_or("Asia/Shanghai".to_string()));
                        rule.updated_at = Set(now);
                        rule.update(txn).await?;
                    } else {
                        // Create new rule
                        let rule = recurrence_rule::ActiveModel {
                            id: Set(Uuid::new_v4()),
                            r#type: Set(parse_recurrence_type(&rule_input.r#type)?),
                            interval: Set(rule_input.interval.unwrap_or(1)),
                            by_weekday: Set(rule_input.by_weekday),
                            by_monthday: Set(rule_input.by_monthday),
                            timezone: Set(rule_input
                                .timezone
                                .unwrap_or("Asia/Shanghai".to_string())),
                            created_at: Set(now),
                            updated_at: Set(now),
                        };
                        let created = rule.insert(txn).await?;
                        rule_id = Some(created.id);
                    }
                } else if let Some(existing_rule_id) = rule_id {
                    // Payload carries no rule: delete the old one.
                    // NOTE(review): the rule row is deleted while the task
                    // still references it; this relies on the FK being
                    // absent or ON DELETE SET NULL — confirm in migration.
                    recurrence_rule::Entity::delete_by_id(existing_rule_id)
                        .exec(txn)
                        .await?;
                    rule_id = None;
                }
                // Delete existing recipients and create new ones
                reminder_task_recipient::Entity::delete_many()
                    .filter(reminder_task_recipient::Column::TaskId.eq(id))
                    .exec(txn)
                    .await?;
                for recipient_id in body.recipient_ids {
                    let new_recipient = reminder_task_recipient::ActiveModel {
                        task_id: Set(id),
                        user_id: Set(recipient_id),
                    };
                    new_recipient.insert(txn).await?;
                }
                // Delete existing offsets and create new ones
                reminder_offset::Entity::delete_many()
                    .filter(reminder_offset::Column::TargetType.eq(TargetType::ReminderTask))
                    .filter(reminder_offset::Column::TargetId.eq(id))
                    .exec(txn)
                    .await?;
                if let Some(offsets) = body.offsets {
                    for offset in offsets {
                        let new_offset = reminder_offset::ActiveModel {
                            id: Set(Uuid::new_v4()),
                            target_type: Set(TargetType::ReminderTask),
                            target_id: Set(id),
                            offset_minutes: Set(offset.offset_minutes),
                            channel_inapp: Set(offset.channel_inapp.unwrap_or(true)),
                            channel_bark: Set(offset.channel_bark.unwrap_or(false)),
                            created_at: Set(now),
                            bark_title: Set(offset.bark_title),
                            bark_subtitle: Set(offset.bark_subtitle),
                            bark_body_markdown: Set(offset.bark_body_markdown),
                            bark_level: Set(offset.bark_level),
                            bark_icon: Set(offset.bark_icon),
                        };
                        new_offset.insert(txn).await?;
                    }
                }
                // Finally update the task row itself.
                let mut active: reminder_task::ActiveModel = existing.into();
                active.title = Set(body.title);
                active.description = Set(body.description);
                active.due_at = Set(body.due_at.fixed_offset());
                active.recurrence_rule_id = Set(rule_id);
                active.updated_at = Set(now);
                let updated = active.update(txn).await?;
                Ok(updated)
            })
        })
        .await
        .map_err(|e| match e {
            sea_orm::TransactionError::Connection(e) => ApiError::Internal(e.to_string()),
            sea_orm::TransactionError::Transaction(e) => e,
        })?;
    // Ask the background worker to regenerate notifications (best-effort;
    // a send failure is deliberately ignored).
    let _ = app_data
        .send_worker_command(WorkerCommand::GenerateNotifications {
            target_type: TargetType::ReminderTask,
            target_id: result.id,
        })
        .await;
    Ok(HttpResponse::Ok().json(result))
}
#[delete("/{id}")]
async fn delete_task(
app_data: web::Data<AppData>,
auth: AuthUser,
path: web::Path<Uuid>,
) -> Result<impl Responder, ApiError> {
let id = path.into_inner();
let result = reminder_task::Entity::delete_many()
.filter(reminder_task::Column::Id.eq(id))
.filter(reminder_task::Column::CreatorId.eq(auth.user_id))
.exec(&app_data.db)
.await?;
if result.rows_affected == 0 {
return Err(ApiError::NotFound("Not found".to_string()));
}
// Delete offsets
reminder_offset::Entity::delete_many()
.filter(reminder_offset::Column::TargetType.eq(TargetType::ReminderTask))
.filter(reminder_offset::Column::TargetId.eq(id))
.exec(&app_data.db)
.await?;
// Delete recipients
reminder_task_recipient::Entity::delete_many()
.filter(reminder_task_recipient::Column::TaskId.eq(id))
.exec(&app_data.db)
.await?;
Ok(HttpResponse::Ok().json(OkResponse { ok: true }))
}
/// Maps the wire value ("hourly" … "yearly") onto the DB enum; any other
/// string is rejected as a 400 Bad Request.
fn parse_recurrence_type(s: &str) -> Result<RecurrenceType, ApiError> {
    let parsed = match s {
        "hourly" => Some(RecurrenceType::Hourly),
        "daily" => Some(RecurrenceType::Daily),
        "weekly" => Some(RecurrenceType::Weekly),
        "monthly" => Some(RecurrenceType::Monthly),
        "yearly" => Some(RecurrenceType::Yearly),
        _ => None,
    };
    parsed.ok_or_else(|| ApiError::BadRequest("Invalid recurrence type".to_string()))
}
/// Mounts the reminder-task CRUD endpoints under `/api/reminder-tasks`.
/// Registration order mirrors the original module.
pub fn routes() -> Scope {
    let scope = web::scope("/api/reminder-tasks");
    scope
        .service(list_tasks)
        .service(create_task)
        .service(get_task)
        .service(update_task)
        .service(delete_task)
}

View File

@@ -0,0 +1,432 @@
use actix_web::{HttpResponse, Responder, Scope, delete, get, post, put, web};
use sea_orm::{
ActiveModelTrait, ColumnTrait, EntityTrait, QueryFilter, QueryOrder, Set, TransactionTrait,
};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::app_data::AppData;
use crate::entity::sea_orm_active_enums::{RecurrenceType, TargetType};
use crate::entity::{recurrence_rule, reminder_offset, todo};
use crate::error::ApiError;
use crate::middleware::auth::AuthUser;
use crate::timer::WorkerCommand;
/// Recurrence specification accepted when creating/updating a todo.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RecurrenceRuleInput {
    /// One of "hourly", "daily", "weekly", "monthly", "yearly";
    /// validated by `parse_recurrence_type`.
    pub r#type: String,
    /// Step between occurrences; defaults to 1 when omitted.
    pub interval: Option<i32>,
    /// Weekday selector for weekly rules — assumes a 0-6 encoding;
    /// TODO confirm against the worker.
    pub by_weekday: Option<i32>,
    /// Day-of-month selector for monthly rules.
    pub by_monthday: Option<i32>,
    /// Timezone name; defaults to "Asia/Shanghai" when omitted.
    pub timezone: Option<String>,
}
/// One reminder offset relative to the todo's due time. Unlike the
/// reminder-task variant, todos carry no Bark customization fields.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct OffsetInput {
    /// Minutes relative to the due time; sign convention not visible here
    /// — confirm in the notification worker.
    pub offset_minutes: i32,
    /// Deliver in-app; defaults to true when omitted.
    pub channel_inapp: Option<bool>,
    /// Deliver via Bark push; defaults to false when omitted.
    pub channel_bark: Option<bool>,
}
/// Request body shared by todo creation (POST) and replacement (PUT).
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct TodoInput {
    /// Todo title; an empty title is rejected with 400.
    pub title: String,
    pub description: Option<String>,
    /// Due time in UTC.
    pub due_at: chrono::DateTime<chrono::Utc>,
    pub recurrence_rule: Option<RecurrenceRuleInput>,
    pub offsets: Option<Vec<OffsetInput>>,
}
/// Minimal `{ "ok": true }` acknowledgement body.
#[derive(Debug, Serialize)]
pub struct OkResponse {
    pub ok: bool,
}
/// GET /api/todos — all todos owned by the caller, ordered by due time
/// (soonest first), each serialized with its recurrence rule, offsets and
/// check-in state.
#[get("")]
async fn list_todos(
    app_data: web::Data<AppData>,
    auth: AuthUser,
) -> Result<impl Responder, ApiError> {
    let todos_with_rules = todo::Entity::find()
        .filter(todo::Column::OwnerId.eq(auth.user_id))
        .order_by_asc(todo::Column::DueAt)
        .find_also_related(recurrence_rule::Entity)
        .all(&app_data.db)
        .await?;
    // One bulk query for every todo's offsets instead of one per todo.
    let ids: Vec<Uuid> = todos_with_rules.iter().map(|(item, _)| item.id).collect();
    let all_offsets = reminder_offset::Entity::find()
        .filter(reminder_offset::Column::TargetType.eq(TargetType::Todo))
        .filter(reminder_offset::Column::TargetId.is_in(ids))
        .all(&app_data.db)
        .await?;
    let mut payload = Vec::with_capacity(todos_with_rules.len());
    for (item, rule) in todos_with_rules {
        let offsets: Vec<_> = all_offsets
            .iter()
            .filter(|o| o.target_id == item.id)
            .cloned()
            .collect();
        payload.push(serde_json::json!({
            "id": item.id,
            "ownerId": item.owner_id,
            "title": item.title,
            "description": item.description,
            "dueAt": item.due_at,
            "recurrenceRuleId": item.recurrence_rule_id,
            "createdAt": item.created_at,
            "updatedAt": item.updated_at,
            "checkInAt": item.check_in_at,
            "checkInCount": item.check_in_count,
            "isCheckedIn": item.is_checked_in,
            "recurrenceRule": rule,
            "offsets": offsets,
        }));
    }
    Ok(HttpResponse::Ok().json(payload))
}
/// POST /api/todos — create a todo plus its optional recurrence rule and
/// reminder offsets in one transaction, then ask the worker to generate
/// notifications for it.
///
/// Returns 400 for an empty title, otherwise the created todo row.
#[post("")]
async fn create_todo(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    body: web::Json<TodoInput>,
) -> Result<impl Responder, ApiError> {
    if body.title.is_empty() {
        return Err(ApiError::BadRequest("Invalid payload".to_string()));
    }
    // Single timestamp reused for every row created in this request.
    let now = chrono::Utc::now().fixed_offset();
    let body = body.into_inner();
    let user_id = auth.user_id;
    let result = app_data
        .db
        .transaction::<_, todo::Model, ApiError>(|txn| {
            Box::pin(async move {
                // Create the recurrence rule first (if provided) so its id
                // can be stored on the todo row.
                let rule_id = if let Some(rule_input) = body.recurrence_rule {
                    let rule = recurrence_rule::ActiveModel {
                        id: Set(Uuid::new_v4()),
                        r#type: Set(parse_recurrence_type(&rule_input.r#type)?),
                        interval: Set(rule_input.interval.unwrap_or(1)),
                        by_weekday: Set(rule_input.by_weekday),
                        by_monthday: Set(rule_input.by_monthday),
                        timezone: Set(rule_input.timezone.unwrap_or("Asia/Shanghai".to_string())),
                        created_at: Set(now),
                        updated_at: Set(now),
                    };
                    let created = rule.insert(txn).await?;
                    Some(created.id)
                } else {
                    None
                };
                // Create todo with a zeroed check-in state.
                let new_todo = todo::ActiveModel {
                    id: Set(Uuid::new_v4()),
                    owner_id: Set(user_id),
                    title: Set(body.title),
                    description: Set(body.description),
                    due_at: Set(body.due_at.fixed_offset()),
                    recurrence_rule_id: Set(rule_id),
                    created_at: Set(now),
                    updated_at: Set(now),
                    check_in_at: Set(None),
                    check_in_count: Set(0),
                    is_checked_in: Set(false),
                };
                let created_todo = new_todo.insert(txn).await?;
                // Create offsets; todo offsets carry no Bark customization.
                if let Some(offsets) = body.offsets {
                    for offset in offsets {
                        let new_offset = reminder_offset::ActiveModel {
                            id: Set(Uuid::new_v4()),
                            target_type: Set(TargetType::Todo),
                            target_id: Set(created_todo.id),
                            offset_minutes: Set(offset.offset_minutes),
                            channel_inapp: Set(offset.channel_inapp.unwrap_or(true)),
                            channel_bark: Set(offset.channel_bark.unwrap_or(false)),
                            created_at: Set(now),
                            bark_title: Set(None),
                            bark_subtitle: Set(None),
                            bark_body_markdown: Set(None),
                            bark_level: Set(None),
                            bark_icon: Set(None),
                        };
                        new_offset.insert(txn).await?;
                    }
                }
                Ok(created_todo)
            })
        })
        .await
        .map_err(|e| match e {
            sea_orm::TransactionError::Connection(e) => ApiError::Internal(e.to_string()),
            sea_orm::TransactionError::Transaction(e) => e,
        })?;
    // Ask the background worker to generate notifications for the new todo.
    // Best-effort: a send failure is deliberately ignored here.
    let _ = app_data
        .send_worker_command(WorkerCommand::GenerateNotifications {
            target_type: TargetType::Todo,
            target_id: result.id,
        })
        .await;
    Ok(HttpResponse::Ok().json(result))
}
/// GET /api/todos/{id} — one todo owned by the caller, together with its
/// recurrence rule, offsets and check-in state; 404 when it does not exist
/// or belongs to someone else.
#[get("/{id}")]
async fn get_todo(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    path: web::Path<Uuid>,
) -> Result<impl Responder, ApiError> {
    let todo_id = path.into_inner();
    let (item, rule) = todo::Entity::find_by_id(todo_id)
        .filter(todo::Column::OwnerId.eq(auth.user_id))
        .find_also_related(recurrence_rule::Entity)
        .one(&app_data.db)
        .await?
        .ok_or_else(|| ApiError::NotFound("Not found".to_string()))?;
    let offsets = reminder_offset::Entity::find()
        .filter(reminder_offset::Column::TargetType.eq(TargetType::Todo))
        .filter(reminder_offset::Column::TargetId.eq(item.id))
        .all(&app_data.db)
        .await?;
    Ok(HttpResponse::Ok().json(serde_json::json!({
        "id": item.id,
        "ownerId": item.owner_id,
        "title": item.title,
        "description": item.description,
        "dueAt": item.due_at,
        "recurrenceRuleId": item.recurrence_rule_id,
        "createdAt": item.created_at,
        "updatedAt": item.updated_at,
        "checkInAt": item.check_in_at,
        "checkInCount": item.check_in_count,
        "isCheckedIn": item.is_checked_in,
        "recurrenceRule": rule,
        "offsets": offsets,
    })))
}
/// PUT /api/todos/{id} — replace a todo's fields, recurrence rule and
/// offsets atomically, then ask the worker to regenerate its
/// notifications.
///
/// Returns 400 for an empty title, 404 when the todo is missing or owned
/// by another user.
#[put("/{id}")]
async fn update_todo(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    path: web::Path<Uuid>,
    body: web::Json<TodoInput>,
) -> Result<impl Responder, ApiError> {
    let id = path.into_inner();
    if body.title.is_empty() {
        return Err(ApiError::BadRequest("Invalid payload".to_string()));
    }
    let now = chrono::Utc::now().fixed_offset();
    let body = body.into_inner();
    let user_id = auth.user_id;
    let result = app_data
        .db
        .transaction::<_, todo::Model, ApiError>(|txn| {
            Box::pin(async move {
                // The owner filter doubles as the ownership check.
                let existing = todo::Entity::find_by_id(id)
                    .filter(todo::Column::OwnerId.eq(user_id))
                    .one(txn)
                    .await?
                    .ok_or_else(|| ApiError::NotFound("Not found".to_string()))?;
                // Handle recurrence rule: update in place, create anew, or
                // drop it, depending on what the payload carries.
                let mut rule_id = existing.recurrence_rule_id;
                if let Some(rule_input) = body.recurrence_rule {
                    if let Some(existing_rule_id) = rule_id {
                        // Update existing rule
                        let mut rule: recurrence_rule::ActiveModel =
                            recurrence_rule::Entity::find_by_id(existing_rule_id)
                                .one(txn)
                                .await?
                                .ok_or_else(|| ApiError::Internal("Rule not found".to_string()))?
                                .into();
                        rule.r#type = Set(parse_recurrence_type(&rule_input.r#type)?);
                        rule.interval = Set(rule_input.interval.unwrap_or(1));
                        rule.by_weekday = Set(rule_input.by_weekday);
                        rule.by_monthday = Set(rule_input.by_monthday);
                        rule.timezone =
                            Set(rule_input.timezone.unwrap_or("Asia/Shanghai".to_string()));
                        rule.updated_at = Set(now);
                        rule.update(txn).await?;
                    } else {
                        // Create new rule
                        let rule = recurrence_rule::ActiveModel {
                            id: Set(Uuid::new_v4()),
                            r#type: Set(parse_recurrence_type(&rule_input.r#type)?),
                            interval: Set(rule_input.interval.unwrap_or(1)),
                            by_weekday: Set(rule_input.by_weekday),
                            by_monthday: Set(rule_input.by_monthday),
                            timezone: Set(rule_input
                                .timezone
                                .unwrap_or("Asia/Shanghai".to_string())),
                            created_at: Set(now),
                            updated_at: Set(now),
                        };
                        let created = rule.insert(txn).await?;
                        rule_id = Some(created.id);
                    }
                } else if let Some(existing_rule_id) = rule_id {
                    // Payload carries no rule: delete the old one.
                    // NOTE(review): the rule row is deleted while the todo
                    // still references it; this relies on the FK being
                    // absent or ON DELETE SET NULL — confirm in migration.
                    recurrence_rule::Entity::delete_by_id(existing_rule_id)
                        .exec(txn)
                        .await?;
                    rule_id = None;
                }
                // Delete existing offsets and create new ones
                reminder_offset::Entity::delete_many()
                    .filter(reminder_offset::Column::TargetType.eq(TargetType::Todo))
                    .filter(reminder_offset::Column::TargetId.eq(id))
                    .exec(txn)
                    .await?;
                if let Some(offsets) = body.offsets {
                    for offset in offsets {
                        let new_offset = reminder_offset::ActiveModel {
                            id: Set(Uuid::new_v4()),
                            target_type: Set(TargetType::Todo),
                            target_id: Set(id),
                            offset_minutes: Set(offset.offset_minutes),
                            channel_inapp: Set(offset.channel_inapp.unwrap_or(true)),
                            channel_bark: Set(offset.channel_bark.unwrap_or(false)),
                            created_at: Set(now),
                            bark_title: Set(None),
                            bark_subtitle: Set(None),
                            bark_body_markdown: Set(None),
                            bark_level: Set(None),
                            bark_icon: Set(None),
                        };
                        new_offset.insert(txn).await?;
                    }
                }
                // Finally update the todo row itself.
                let mut active: todo::ActiveModel = existing.into();
                active.title = Set(body.title);
                active.description = Set(body.description);
                active.due_at = Set(body.due_at.fixed_offset());
                active.recurrence_rule_id = Set(rule_id);
                active.updated_at = Set(now);
                let updated = active.update(txn).await?;
                Ok(updated)
            })
        })
        .await
        .map_err(|e| match e {
            sea_orm::TransactionError::Connection(e) => ApiError::Internal(e.to_string()),
            sea_orm::TransactionError::Transaction(e) => e,
        })?;
    // Ask the background worker to regenerate notifications (best-effort;
    // a send failure is deliberately ignored).
    let _ = app_data
        .send_worker_command(WorkerCommand::GenerateNotifications {
            target_type: TargetType::Todo,
            target_id: result.id,
        })
        .await;
    Ok(HttpResponse::Ok().json(result))
}
/// POST /api/todos/{id}/check-in — record a check-in on the caller's todo:
/// sets the checked-in flag, stamps the time and bumps the counter.
/// Returns the updated todo row, or 404 for a missing/foreign todo.
#[post("/{id}/check-in")]
async fn check_in_todo(
    app_data: web::Data<AppData>,
    auth: AuthUser,
    path: web::Path<Uuid>,
) -> Result<impl Responder, ApiError> {
    let todo_id = path.into_inner();
    // Ownership check doubles as the existence check.
    let current = todo::Entity::find_by_id(todo_id)
        .filter(todo::Column::OwnerId.eq(auth.user_id))
        .one(&app_data.db)
        .await?
        .ok_or_else(|| ApiError::NotFound("Not found".to_string()))?;
    let stamp = chrono::Utc::now().fixed_offset();
    let next_count = current.check_in_count + 1;
    let mut pending: todo::ActiveModel = current.into();
    pending.is_checked_in = Set(true);
    pending.check_in_at = Set(Some(stamp));
    pending.check_in_count = Set(next_count);
    pending.updated_at = Set(stamp);
    let saved = pending.update(&app_data.db).await?;
    Ok(HttpResponse::Ok().json(saved))
}
#[delete("/{id}")]
async fn delete_todo(
app_data: web::Data<AppData>,
auth: AuthUser,
path: web::Path<Uuid>,
) -> Result<impl Responder, ApiError> {
let id = path.into_inner();
let result = todo::Entity::delete_many()
.filter(todo::Column::Id.eq(id))
.filter(todo::Column::OwnerId.eq(auth.user_id))
.exec(&app_data.db)
.await?;
if result.rows_affected == 0 {
return Err(ApiError::NotFound("Not found".to_string()));
}
// Delete offsets
reminder_offset::Entity::delete_many()
.filter(reminder_offset::Column::TargetType.eq(TargetType::Todo))
.filter(reminder_offset::Column::TargetId.eq(id))
.exec(&app_data.db)
.await?;
Ok(HttpResponse::Ok().json(OkResponse { ok: true }))
}
/// Translates the wire value into the DB recurrence enum. Unknown values
/// become a 400 Bad Request.
fn parse_recurrence_type(s: &str) -> Result<RecurrenceType, ApiError> {
    if s == "hourly" {
        return Ok(RecurrenceType::Hourly);
    }
    if s == "daily" {
        return Ok(RecurrenceType::Daily);
    }
    if s == "weekly" {
        return Ok(RecurrenceType::Weekly);
    }
    if s == "monthly" {
        return Ok(RecurrenceType::Monthly);
    }
    if s == "yearly" {
        return Ok(RecurrenceType::Yearly);
    }
    Err(ApiError::BadRequest("Invalid recurrence type".to_string()))
}
/// Mounts the todo CRUD + check-in endpoints under `/api/todos`.
/// Registration order mirrors the original module.
pub fn routes() -> Scope {
    let scope = web::scope("/api/todos");
    scope
        .service(list_todos)
        .service(create_todo)
        .service(get_todo)
        .service(update_todo)
        .service(check_in_todo)
        .service(delete_todo)
}

View File

@@ -0,0 +1,64 @@
use actix_web::{HttpResponse, Responder, Scope, get, web};
use sea_orm::sea_query::extension::postgres::PgExpr;
use sea_orm::sea_query::Expr;
use sea_orm::{EntityTrait, QueryFilter, QueryOrder};
use serde::{Deserialize, Serialize};
use uuid::Uuid;
use crate::app_data::AppData;
use crate::entity::user;
use crate::error::ApiError;
use crate::middleware::auth::AuthUser;
/// Query string for `GET /api/users`.
#[derive(Debug, Deserialize)]
pub struct SearchQuery {
    /// Optional case-insensitive substring to match against usernames.
    pub query: Option<String>,
}
/// Public projection of a user row: no password hash or settings exposed.
#[derive(Debug, Serialize)]
pub struct UserResponse {
    pub id: Uuid,
    pub username: String,
    pub avatar: Option<String>,
}
/// GET /api/users?query=… — list users ordered by username; when a
/// non-empty query is given, restrict to usernames containing it
/// (case-insensitive, PostgreSQL ILIKE).
///
/// LIKE metacharacters (`\`, `%`, `_`) in the user's input are escaped so
/// a search for "%" or "_" matches those literal characters instead of
/// acting as wildcards (the previous code interpolated raw input into the
/// pattern).
#[get("")]
async fn search_users(
    app_data: web::Data<AppData>,
    _auth: AuthUser,
    query: web::Query<SearchQuery>,
) -> Result<impl Responder, ApiError> {
    let mut q = user::Entity::find();
    if let Some(search) = &query.query {
        let search = search.trim();
        if !search.is_empty() {
            // Escape backslash first, then the LIKE wildcards, so the
            // user's text is matched literally inside the %…% pattern.
            let escaped = search
                .replace('\\', "\\\\")
                .replace('%', "\\%")
                .replace('_', "\\_");
            let pattern = format!("%{}%", escaped);
            q = q.filter(
                Expr::col((user::Entity, user::Column::Username))
                    .ilike(&pattern)
            );
        }
    }
    let users = q
        .order_by_asc(user::Column::Username)
        .all(&app_data.db)
        .await?;
    // Project to the public shape; never serialize full user rows here.
    let result: Vec<UserResponse> = users
        .into_iter()
        .map(|u| UserResponse {
            id: u.id,
            username: u.username,
            avatar: u.avatar,
        })
        .collect();
    Ok(HttpResponse::Ok().json(result))
}
/// Mounts the user-search endpoint under `/api/users`.
pub fn routes() -> Scope {
    let scope = web::scope("/api/users");
    scope.service(search_users)
}

View File

@@ -0,0 +1,71 @@
use migration::{Migrator, MigratorTrait};
use sea_orm::{ConnectOptions, Database, DbConn};
use std::env;
use std::path::PathBuf;
use tokio::sync::mpsc;
use tracing::info;
use crate::timer::{NotificationWorker, SharedTimeWheel, WorkerCommand};
/// Shared application state handed (cloned) to every actix-web handler.
#[derive(Clone)]
pub struct AppData {
    /// SeaORM database handle.
    pub db: DbConn,
    /// Secret used to sign and verify JWTs.
    pub jwt_secret: String,
    /// Channel for sending commands to the notification worker.
    pub worker_tx: mpsc::Sender<WorkerCommand>,
    /// Base server URL used to build absolute URLs for resources such as
    /// avatars.
    /// Local development: http://localhost:4000
    /// Production: https://notify.michaelandmeryl.xyz
    pub base_url: String,
    /// Directory where uploaded files are stored.
    pub upload_dir: PathBuf,
}
impl AppData {
    /// Builds the full application state from environment variables:
    /// connects to the database, runs migrations, prepares the upload
    /// directories and starts the notification worker — in that order.
    ///
    /// Environment variables (all optional, with dev defaults):
    /// `DATABASE_URL`, `JWT_SECRET`, `BASE_URL`, `UPLOAD_DIR`.
    pub async fn new() -> Result<Self, anyhow::Error> {
        let url = env::var("DATABASE_URL")
            .unwrap_or_else(|_| "postgres://postgres:postgres@localhost:5432/notify".to_string());
        let mut opt = ConnectOptions::new(url);
        opt.max_connections(10).sqlx_logging(false);
        let db = Database::connect(opt).await?;
        // Run pending database migrations automatically at startup.
        info!("Running database migrations...");
        Migrator::up(&db, None).await?;
        info!("Database migrations completed.");
        let jwt_secret = env::var("JWT_SECRET").unwrap_or_else(|_| "dev-secret".to_string());
        // Read BASE_URL from the environment; defaults to the local dev address.
        let base_url = env::var("BASE_URL").unwrap_or_else(|_| "http://localhost:4000".to_string());
        // Upload directory; defaults to ./uploads under the working directory.
        let upload_dir = env::var("UPLOAD_DIR")
            .map(PathBuf::from)
            .unwrap_or_else(|_| PathBuf::from("./uploads"));
        // Make sure the upload directories exist before any handler needs them.
        tokio::fs::create_dir_all(&upload_dir).await?;
        tokio::fs::create_dir_all(upload_dir.join("avatars")).await?;
        // Create and start the time-wheel notification worker.
        let time_wheel = SharedTimeWheel::new();
        let worker = NotificationWorker::new(db.clone(), time_wheel);
        let worker_tx = worker.start().await;
        Ok(Self {
            db,
            jwt_secret,
            worker_tx,
            base_url,
            upload_dir,
        })
    }
    /// Sends a command to the notification worker.
    pub async fn send_worker_command(
        &self,
        cmd: WorkerCommand,
    ) -> Result<(), mpsc::error::SendError<WorkerCommand>> {
        self.worker_tx.send(cmd).await
    }
}

View File

@@ -0,0 +1,41 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
use super::sea_orm_active_enums::ChannelType;
use super::sea_orm_active_enums::NotificationStatus;
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// Audit row recording a single delivery attempt of a notification over one
/// channel, with the raw transport response kept for debugging.
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "delivery_log")]
#[sea_orm::model]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: Uuid,
    /// Notification this attempt belongs to; rows cascade-delete with it.
    pub notification_id: Uuid,
    /// Ordinal of the attempt for this notification.
    pub attempt_no: i32,
    /// Channel used for this attempt.
    pub channel: ChannelType,
    /// Outcome of the attempt.
    pub status: NotificationStatus,
    /// Raw transport/provider response, stored as JSONB.
    #[sea_orm(column_type = "JsonBinary", nullable)]
    pub response_meta: Option<Json>,
    pub created_at: DateTimeWithTimeZone,
}
/// Foreign-key relations declared on this table.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::notification::Entity",
        from = "Column::NotificationId",
        to = "super::notification::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    Notification,
}
impl Related<super::notification::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Notification.def()
    }
}
// Default hooks; no custom save/delete behavior.
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,40 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// Registration invite code with a usage quota and an expiry time.
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "invite")]
#[sea_orm::model]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: Uuid,
    /// The invite code itself; unique across all invites.
    #[sea_orm(unique)]
    pub code: String,
    /// User who issued the invite; invites cascade-delete with that user.
    pub creator_id: Uuid,
    /// Maximum number of redemptions allowed.
    pub max_uses: i32,
    /// Redemptions consumed so far.
    pub used_count: i32,
    pub expires_at: DateTimeWithTimeZone,
    /// Set when the invite has been revoked; None while active.
    pub revoked_at: Option<DateTimeWithTimeZone>,
    pub created_at: DateTimeWithTimeZone,
}
/// Foreign-key relations declared on this table.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::CreatorId",
        to = "super::user::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    User,
}
impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}
// Default hooks; no custom save/delete behavior.
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,14 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
pub mod prelude;
pub mod delivery_log;
pub mod invite;
pub mod notification;
pub mod recurrence_rule;
pub mod reminder_offset;
pub mod reminder_task;
pub mod reminder_task_recipient;
pub mod sea_orm_active_enums;
pub mod todo;
pub mod user;

View File

@@ -0,0 +1,61 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
use super::sea_orm_active_enums::ChannelType;
use super::sea_orm_active_enums::NotificationStatus;
use super::sea_orm_active_enums::TargetType;
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// One scheduled notification occurrence for a recipient. The columns
/// tagged with the shared `unique_key` form a composite unique constraint
/// over (recipient, target type, target id, trigger time, channel) that
/// deduplicates generated notifications.
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "notification")]
#[sea_orm::model]
#[serde(rename_all = "camelCase")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: Uuid,
    /// User this notification is addressed to; cascade-deleted with them.
    #[sea_orm(unique_key = "UQ_notification_recipient_target_trigger_channel")]
    pub recipient_id: Uuid,
    /// Kind of the originating object (reminder task / todo).
    #[sea_orm(unique_key = "UQ_notification_recipient_target_trigger_channel")]
    pub target_type: TargetType,
    /// Id of the originating object.
    #[sea_orm(unique_key = "UQ_notification_recipient_target_trigger_channel")]
    pub target_id: Uuid,
    /// Time the notification is due to fire.
    #[sea_orm(unique_key = "UQ_notification_recipient_target_trigger_channel")]
    pub trigger_at: DateTimeWithTimeZone,
    /// Delivery channel.
    #[sea_orm(unique_key = "UQ_notification_recipient_target_trigger_channel")]
    pub channel: ChannelType,
    pub status: NotificationStatus,
    /// Set while a worker holds the notification — presumably to stop a
    /// second worker from double-sending; confirm in the worker code.
    pub locked_at: Option<DateTimeWithTimeZone>,
    pub sent_at: Option<DateTimeWithTimeZone>,
    /// When the recipient marked it read (in-app).
    pub read_at: Option<DateTimeWithTimeZone>,
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    /// Reminder offset that produced this notification, if any.
    pub offset_id: Option<Uuid>,
}
/// Relations: delivery-log children and the recipient user.
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::delivery_log::Entity")]
    DeliveryLog,
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::RecipientId",
        to = "super::user::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    User,
}
impl Related<super::delivery_log::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::DeliveryLog.def()
    }
}
impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}
// Default hooks; no custom save/delete behavior.
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,11 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
pub use super::delivery_log::Entity as DeliveryLog;
pub use super::invite::Entity as Invite;
pub use super::notification::Entity as Notification;
pub use super::recurrence_rule::Entity as RecurrenceRule;
pub use super::reminder_offset::Entity as ReminderOffset;
pub use super::reminder_task::Entity as ReminderTask;
pub use super::reminder_task_recipient::Entity as ReminderTaskRecipient;
pub use super::todo::Entity as Todo;
pub use super::user::Entity as User;

View File

@@ -0,0 +1,43 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
use super::sea_orm_active_enums::RecurrenceType;
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// A repetition rule ("every N hours/days/weeks/months/years") referenced by
/// both todos and reminder tasks.
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "recurrence_rule")]
#[sea_orm::model]
#[serde(rename_all = "camelCase")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: Uuid,
    pub r#type: RecurrenceType,
    // Repeat every `interval` units of `type`; the scheduler's
    // `calculate_next_due` clamps this to at least 1.
    pub interval: i32,
    // Optional weekday pin for weekly rules (0 = Sunday ... 6 = Saturday,
    // per `num_to_weekday` in the timer module).
    pub by_weekday: Option<i32>,
    // Optional day-of-month pin for monthly rules.
    pub by_monthday: Option<i32>,
    // Time zone name, e.g. "Asia/Shanghai".
    pub timezone: String,
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(has_many = "super::reminder_task::Entity")]
    ReminderTask,
    #[sea_orm(has_many = "super::todo::Entity")]
    Todo,
}
impl Related<super::reminder_task::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::ReminderTask.def()
    }
}
impl Related<super::todo::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Todo.def()
    }
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,31 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
use super::sea_orm_active_enums::TargetType;
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// Per-target reminder configuration: when to fire relative to the target's
/// due time and through which channels, plus optional Bark push presentation
/// fields. `target_type`/`target_id` form a polymorphic reference to either
/// a todo or a reminder task (no FK relation is declared here).
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "reminder_offset")]
#[sea_orm::model]
#[serde(rename_all = "camelCase")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: Uuid,
    pub target_type: TargetType,
    pub target_id: Uuid,
    // NOTE(review): presumably minutes relative to the due time
    // (0 = at the due time) — confirm against the notification generator.
    pub offset_minutes: i32,
    pub channel_inapp: bool,
    pub channel_bark: bool,
    pub created_at: DateTimeWithTimeZone,
    pub bark_title: Option<String>,
    pub bark_subtitle: Option<String>,
    #[sea_orm(column_type = "Text", nullable)]
    pub bark_body_markdown: Option<String>,
    pub bark_level: Option<String>,
    pub bark_icon: Option<String>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,68 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// A reminder created by one user and delivered to many recipients (via the
/// `reminder_task_recipient` join table), optionally recurring.
// NOTE(review): unlike `notification`/`recurrence_rule`, this model has no
// #[serde(rename_all = "camelCase")], so its JSON uses snake_case field
// names — confirm this asymmetry is intentional on the API surface.
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "reminder_task")]
#[sea_orm::model]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: Uuid,
    pub creator_id: Uuid,
    pub title: String,
    pub description: Option<String>,
    pub due_at: DateTimeWithTimeZone,
    pub recurrence_rule_id: Option<Uuid>,
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::recurrence_rule::Entity",
        from = "Column::RecurrenceRuleId",
        to = "super::recurrence_rule::Column::Id",
        on_update = "Cascade",
        on_delete = "SetNull"
    )]
    RecurrenceRule,
    #[sea_orm(has_many = "super::reminder_task_recipient::Entity")]
    ReminderTaskRecipient,
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::CreatorId",
        to = "super::user::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    User,
}
impl Related<super::recurrence_rule::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::RecurrenceRule.def()
    }
}
impl Related<super::reminder_task_recipient::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::ReminderTaskRecipient.def()
    }
}
// Many-to-many to users (the recipients), routed through the join table.
impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        super::reminder_task_recipient::Relation::User.def()
    }
    fn via() -> Option<RelationDef> {
        Some(
            super::reminder_task_recipient::Relation::ReminderTask
                .def()
                .rev(),
        )
    }
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,49 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// Join table linking a reminder task to one of its recipient users.
/// The composite primary key (task_id, user_id) makes each pairing unique.
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "reminder_task_recipient")]
#[sea_orm::model]
#[serde(rename_all = "camelCase")]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub task_id: Uuid,
    #[sea_orm(primary_key, auto_increment = false)]
    pub user_id: Uuid,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::reminder_task::Entity",
        from = "Column::TaskId",
        to = "super::reminder_task::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    ReminderTask,
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::UserId",
        to = "super::user::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    User,
}
impl Related<super::reminder_task::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::ReminderTask.def()
    }
}
impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,51 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// Delivery channel for a notification: in-app feed or Bark push.
#[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "channel_type")]
pub enum ChannelType {
    #[sea_orm(string_value = "inapp")]
    Inapp,
    #[sea_orm(string_value = "bark")]
    Bark,
}
/// Lifecycle states of a notification row.
#[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)]
#[sea_orm(
    rs_type = "String",
    db_type = "Enum",
    enum_name = "notification_status"
)]
pub enum NotificationStatus {
    #[sea_orm(string_value = "pending")]
    Pending,
    #[sea_orm(string_value = "queued")]
    Queued,
    #[sea_orm(string_value = "sent")]
    Sent,
    #[sea_orm(string_value = "failed")]
    Failed,
}
/// Unit of repetition for a recurrence rule.
#[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "recurrence_type")]
pub enum RecurrenceType {
    #[sea_orm(string_value = "hourly")]
    Hourly,
    #[sea_orm(string_value = "daily")]
    Daily,
    #[sea_orm(string_value = "weekly")]
    Weekly,
    #[sea_orm(string_value = "monthly")]
    Monthly,
    #[sea_orm(string_value = "yearly")]
    Yearly,
}
/// Kind of entity a notification or reminder offset points at.
#[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum, Serialize, Deserialize)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "target_type")]
pub enum TargetType {
    #[sea_orm(string_value = "todo")]
    Todo,
    #[sea_orm(string_value = "reminder_task")]
    ReminderTask,
}

View File

@@ -0,0 +1,56 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// A single-owner todo item with a due time, optional recurrence, and
/// check-in tracking fields.
// NOTE(review): this model has no #[serde(rename_all = "camelCase")] while
// sibling entities such as `notification` do — confirm the snake_case JSON
// field names here are intentional on the API surface.
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "todo")]
#[sea_orm::model]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: Uuid,
    pub owner_id: Uuid,
    pub title: String,
    pub description: Option<String>,
    pub due_at: DateTimeWithTimeZone,
    pub recurrence_rule_id: Option<Uuid>,
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    // Check-in tracking: last check-in time, running count, and current flag.
    // NOTE(review): semantics inferred from the names — confirm in handlers.
    pub check_in_at: Option<DateTimeWithTimeZone>,
    pub check_in_count: i32,
    pub is_checked_in: bool,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::recurrence_rule::Entity",
        from = "Column::RecurrenceRuleId",
        to = "super::recurrence_rule::Column::Id",
        on_update = "Cascade",
        on_delete = "SetNull"
    )]
    RecurrenceRule,
    #[sea_orm(
        belongs_to = "super::user::Entity",
        from = "Column::OwnerId",
        to = "super::user::Column::Id",
        on_update = "Cascade",
        on_delete = "Cascade"
    )]
    User,
}
impl Related<super::recurrence_rule::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::RecurrenceRule.def()
    }
}
impl Related<super::user::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::User.def()
    }
}
impl ActiveModelBehavior for ActiveModel {}

View File

@@ -0,0 +1,79 @@
//! `SeaORM` Entity, @generated by sea-orm-codegen 2.0
use sea_orm::entity::prelude::*;
use serde::{Deserialize, Serialize};
/// An account, including notification-channel preferences and the invite
/// that created it.
// NOTE(review): no #[serde(rename_all = "camelCase")] here, unlike
// `notification` and friends — confirm the snake_case JSON is intentional.
#[derive(Clone, Debug, PartialEq, Eq, DeriveEntityModel, Serialize, Deserialize)]
#[sea_orm(table_name = "user")]
#[sea_orm::model]
pub struct Model {
    #[sea_orm(primary_key, auto_increment = false)]
    pub id: Uuid,
    #[sea_orm(unique)]
    pub username: String,
    // Never serialized into responses.
    #[serde(skip_serializing)]
    pub password_hash: String,
    pub avatar: Option<String>,
    pub timezone: String,
    // Bark push endpoint for this user, when the bark channel is enabled.
    pub bark_url: Option<String>,
    pub inapp_enabled: bool,
    pub bark_enabled: bool,
    pub created_at: DateTimeWithTimeZone,
    pub updated_at: DateTimeWithTimeZone,
    pub invite_id: Option<Uuid>,
}
#[derive(Copy, Clone, Debug, EnumIter, DeriveRelation)]
pub enum Relation {
    #[sea_orm(
        belongs_to = "super::invite::Entity",
        from = "Column::InviteId",
        to = "super::invite::Column::Id",
        on_update = "Cascade",
        on_delete = "SetNull"
    )]
    Invite,
    #[sea_orm(has_many = "super::notification::Entity")]
    Notification,
    #[sea_orm(has_many = "super::reminder_task::Entity")]
    ReminderTask,
    #[sea_orm(has_many = "super::reminder_task_recipient::Entity")]
    ReminderTaskRecipient,
    #[sea_orm(has_many = "super::todo::Entity")]
    Todo,
}
impl Related<super::invite::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Invite.def()
    }
}
impl Related<super::notification::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Notification.def()
    }
}
impl Related<super::reminder_task_recipient::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::ReminderTaskRecipient.def()
    }
}
impl Related<super::todo::Entity> for Entity {
    fn to() -> RelationDef {
        Relation::Todo.def()
    }
}
// Many-to-many to reminder tasks this user receives, via the join table.
impl Related<super::reminder_task::Entity> for Entity {
    fn to() -> RelationDef {
        super::reminder_task_recipient::Relation::ReminderTask.def()
    }
    fn via() -> Option<RelationDef> {
        Some(super::reminder_task_recipient::Relation::User.def().rev())
    }
}
impl ActiveModelBehavior for ActiveModel {}

69
backend_rust/src/error.rs Normal file
View File

@@ -0,0 +1,69 @@
use actix_web::{HttpResponse, ResponseError, http::StatusCode};
use serde::Serialize;
use std::fmt;
/// Application error type; each variant maps onto one HTTP status code and
/// carries the message that is returned to the client.
#[derive(Debug)]
pub enum ApiError {
    BadRequest(String),
    Unauthorized(String),
    Forbidden(String),
    NotFound(String),
    Conflict(String),
    Internal(String),
}
/// JSON body shape for error responses: `{"error": "<message>"}`.
#[derive(Serialize)]
struct ErrorResponse {
    error: String,
}
impl fmt::Display for ApiError {
    /// Renders the error as its bare message, whatever the variant.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let msg = match self {
            ApiError::BadRequest(m)
            | ApiError::Unauthorized(m)
            | ApiError::Forbidden(m)
            | ApiError::NotFound(m)
            | ApiError::Conflict(m)
            | ApiError::Internal(m) => m,
        };
        f.write_str(msg)
    }
}
impl ResponseError for ApiError {
    /// HTTP status corresponding to each error variant.
    fn status_code(&self) -> StatusCode {
        match self {
            ApiError::BadRequest(_) => StatusCode::BAD_REQUEST,
            ApiError::Unauthorized(_) => StatusCode::UNAUTHORIZED,
            ApiError::Forbidden(_) => StatusCode::FORBIDDEN,
            ApiError::NotFound(_) => StatusCode::NOT_FOUND,
            ApiError::Conflict(_) => StatusCode::CONFLICT,
            ApiError::Internal(_) => StatusCode::INTERNAL_SERVER_ERROR,
        }
    }
    /// Serializes the error as `{"error": "<message>"}` with that status.
    fn error_response(&self) -> HttpResponse {
        HttpResponse::build(self.status_code()).json(ErrorResponse {
            error: self.to_string(),
        })
    }
}
// Database failures surface as 500s carrying the driver's message.
// NOTE(review): this exposes internal DB error text to clients — consider
// logging the detail and returning a generic message instead.
impl From<sea_orm::DbErr> for ApiError {
    fn from(err: sea_orm::DbErr) -> Self {
        Self::Internal(err.to_string())
    }
}
// Password-hashing failures also surface as 500s.
impl From<bcrypt::BcryptError> for ApiError {
    fn from(err: bcrypt::BcryptError) -> Self {
        Self::Internal(err.to_string())
    }
}
// Any JWT error collapses into a single 401 "Invalid token".
impl From<jsonwebtoken::errors::Error> for ApiError {
    fn from(_: jsonwebtoken::errors::Error) -> Self {
        Self::Unauthorized("Invalid token".to_string())
    }
}

6
backend_rust/src/lib.rs Normal file
View File

@@ -0,0 +1,6 @@
pub mod api;
pub mod app_data;
pub mod entity;
pub mod error;
pub mod middleware;
pub mod timer;

64
backend_rust/src/main.rs Normal file
View File

@@ -0,0 +1,64 @@
use actix_cors::Cors;
use actix_files::Files;
use actix_web::{App, HttpServer, web};
use backend_rust::api::{auth, health, invites, me, notifications, reminder_tasks, todos, users};
use backend_rust::app_data::AppData;
use std::env;
use std::path::PathBuf;
use tracing::{error, info};
#[actix_web::main]
async fn main() -> std::io::Result<()> {
    let format = tracing_subscriber::fmt::format().pretty();
    tracing_subscriber::fmt().event_format(format).init();
    // Read configuration from the environment, falling back to defaults.
    let host = env::var("HOST").unwrap_or_else(|_| "0.0.0.0".to_string());
    let port: u16 = env::var("PORT")
        .unwrap_or_else(|_| "4000".to_string())
        .parse()
        .expect("PORT must be a valid number");
    // Directory that holds user uploads (e.g. avatars).
    let upload_dir = env::var("UPLOAD_DIR")
        .map(PathBuf::from)
        .unwrap_or_else(|_| PathBuf::from("./uploads"));
    let app_data = match AppData::new().await {
        Ok(app_data) => web::Data::new(app_data),
        Err(e) => {
            error!("Failed to connect to database: {}", e);
            std::process::exit(1);
        }
    };
    info!("Starting server on {}:{}", host, port);
    info!("Upload directory: {:?}", upload_dir);
    HttpServer::new(move || {
        // NOTE(review): this CORS policy allows any origin, method, and
        // header. Fine for development; restrict origins in production.
        let cors = Cors::default()
            .allow_any_header()
            .allow_any_method()
            .allow_any_origin();
        App::new()
            .wrap(cors)
            .app_data(app_data.clone())
            .configure(configure_routes)
            // Serve uploaded files (e.g. avatars). Directory listing is
            // deliberately NOT enabled: a browsable index would let any
            // client enumerate every user's uploads. Files stay reachable
            // by their exact path.
            .service(Files::new("/uploads", upload_dir.clone()))
    })
    .bind((host, port))?
    .run()
    .await
}
/// Mounts every API module's routes onto the actix service config.
fn configure_routes(m: &mut web::ServiceConfig) {
    m.service(health::routes())
        .service(auth::routes())
        .service(invites::routes())
        .service(me::routes())
        .service(notifications::routes())
        .service(todos::routes())
        .service(reminder_tasks::routes())
        .service(users::routes());
}

View File

@@ -0,0 +1,76 @@
use actix_web::{FromRequest, HttpRequest, dev::Payload, web};
use jsonwebtoken::{DecodingKey, Validation, decode};
use serde::{Deserialize, Serialize};
use std::future::{Ready, ready};
use uuid::Uuid;
use crate::app_data::AppData;
use crate::error::ApiError;
/// JWT payload: the authenticated user's id plus the expiry timestamp
/// (`exp`, unix seconds), which `jsonwebtoken`'s default validation checks.
#[derive(Debug, Serialize, Deserialize)]
pub struct Claims {
    pub user_id: Uuid,
    pub exp: usize,
}
/// Request extractor: a handler that takes `AuthUser` only runs for
/// requests bearing a valid `Authorization: Bearer <jwt>` header.
#[derive(Debug, Clone)]
pub struct AuthUser {
    pub user_id: Uuid,
}
impl FromRequest for AuthUser {
    type Error = ApiError;
    type Future = Ready<Result<Self, Self::Error>>;
    /// Authentication is resolved synchronously from the request headers,
    /// so the future is immediately ready.
    fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future {
        ready(extract_auth_user(req))
    }
}
/// Pulls the bearer token from the `Authorization` header, verifies it
/// against the app's JWT secret, and returns the authenticated user id.
fn extract_auth_user(req: &HttpRequest) -> Result<AuthUser, ApiError> {
    let data = req
        .app_data::<web::Data<AppData>>()
        .ok_or_else(|| ApiError::Internal("AppData not found".to_string()))?;
    let header_value = match req.headers().get("Authorization").and_then(|h| h.to_str().ok()) {
        Some(value) => value,
        None => {
            return Err(ApiError::Unauthorized(
                "Missing authorization header".to_string(),
            ));
        }
    };
    let raw_token = match header_value.strip_prefix("Bearer ") {
        Some(token) => token,
        None => {
            return Err(ApiError::Unauthorized(
                "Invalid authorization header".to_string(),
            ));
        }
    };
    let decoded = decode::<Claims>(
        raw_token,
        &DecodingKey::from_secret(data.jwt_secret.as_bytes()),
        &Validation::default(),
    )
    .map_err(|_| ApiError::Unauthorized("Invalid token".to_string()))?;
    Ok(AuthUser {
        user_id: decoded.claims.user_id,
    })
}
/// Issues a signed JWT for `user_id`, valid for 7 days from now.
pub fn create_token(user_id: Uuid, secret: &str) -> Result<String, jsonwebtoken::errors::Error> {
    use jsonwebtoken::{EncodingKey, Header, encode};
    let exp = chrono::Utc::now()
        .checked_add_signed(chrono::Duration::days(7))
        .expect("valid timestamp")
        .timestamp() as usize;
    let payload = Claims { user_id, exp };
    let key = EncodingKey::from_secret(secret.as_bytes());
    encode(&Header::default(), &payload, &key)
}

View File

@@ -0,0 +1 @@
pub mod auth;

View File

@@ -0,0 +1,7 @@
pub mod recurrence;
pub mod time_wheel;
pub mod worker;
pub use recurrence::calculate_next_due;
pub use time_wheel::{SharedTimeWheel, TimerTask, TimeWheel};
pub use worker::{NotificationWorker, WorkerCommand};

View File

@@ -0,0 +1,207 @@
use chrono::{DateTime, Datelike, Duration, FixedOffset, NaiveDateTime, TimeZone, Weekday};
use crate::entity::recurrence_rule;
use crate::entity::sea_orm_active_enums::RecurrenceType;
/// Computes the next trigger time after `current_due` according to `rule`.
///
/// The interval is clamped to at least 1. Returns `None` only when the
/// computed calendar date is invalid/ambiguous in the fixed offset
/// (practically never for a `FixedOffset`).
pub fn calculate_next_due(
    rule: &recurrence_rule::Model,
    current_due: DateTime<FixedOffset>,
) -> Option<DateTime<FixedOffset>> {
    let interval = rule.interval.max(1) as i64;
    let offset = current_due.offset().clone();
    match rule.r#type {
        RecurrenceType::Hourly => {
            // Every N hours
            Some(current_due + Duration::hours(interval))
        }
        RecurrenceType::Daily => {
            // Every N days
            Some(current_due + Duration::days(interval))
        }
        RecurrenceType::Weekly => {
            // Every N weeks, optionally pinned to a weekday
            if let Some(weekday) = rule.by_weekday {
                // Jump ahead N weeks, then roll forward to the target weekday
                let target_weekday = num_to_weekday(weekday);
                let next = current_due + Duration::weeks(interval);
                // 0-6 days from `next`'s weekday forward to the target one
                let current_weekday = next.weekday();
                let days_ahead = (target_weekday.num_days_from_monday() as i64
                    - current_weekday.num_days_from_monday() as i64
                    + 7)
                    % 7;
                if days_ahead == 0 {
                    // Already on the target weekday; keep the time of day
                    Some(next)
                } else {
                    Some(next + Duration::days(days_ahead))
                }
            } else {
                Some(current_due + Duration::weeks(interval))
            }
        }
        RecurrenceType::Monthly => {
            // Every N months, optionally pinned to a day of the month
            let target_day = rule.by_monthday.unwrap_or(current_due.day() as i32) as u32;
            let mut year = current_due.year();
            let mut month = current_due.month() as i32 + interval as i32;
            // Carry overflowing months into the year
            while month > 12 {
                month -= 12;
                year += 1;
            }
            // Clamp to the month's length (e.g. there is no Feb 31st)
            let day = target_day.min(days_in_month(year, month as u32));
            let naive = NaiveDateTime::new(
                chrono::NaiveDate::from_ymd_opt(year, month as u32, day)?,
                current_due.time(),
            );
            Some(offset.from_local_datetime(&naive).single()?)
        }
        RecurrenceType::Yearly => {
            // Every N years; Feb 29 clamps to Feb 28 in non-leap years
            let year = current_due.year() + interval as i32;
            let month = current_due.month();
            let day = current_due.day().min(days_in_month(year, month));
            let naive = NaiveDateTime::new(
                chrono::NaiveDate::from_ymd_opt(year, month, day)?,
                current_due.time(),
            );
            Some(offset.from_local_datetime(&naive).single()?)
        }
    }
}
/// Maps an integer to a weekday: 0 = Sunday, 1 = Monday, ..., 6 = Saturday.
///
/// Uses the Euclidean remainder so negative inputs wrap correctly
/// (e.g. -1 -> Saturday). The previous `num % 7` yielded a negative value
/// for negative `num`, which matched no arm and silently fell through to
/// Monday.
fn num_to_weekday(num: i32) -> Weekday {
    match num.rem_euclid(7) {
        0 => Weekday::Sun,
        1 => Weekday::Mon,
        2 => Weekday::Tue,
        3 => Weekday::Wed,
        4 => Weekday::Thu,
        5 => Weekday::Fri,
        // rem_euclid(7) is always in 0..=6, so this arm is exactly 6.
        _ => Weekday::Sat,
    }
}
/// Number of days in `month` (1-12) of `year`; unrecognized month numbers
/// fall back to 30.
fn days_in_month(year: i32, month: u32) -> u32 {
    // Day counts for a common (non-leap) year, January first.
    const COMMON_YEAR: [u32; 12] = [31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31];
    match month {
        2 if is_leap_year(year) => 29,
        m @ 1..=12 => COMMON_YEAR[(m - 1) as usize],
        _ => 30,
    }
}
/// Gregorian leap-year rule: divisible by 400, or by 4 but not by 100.
fn is_leap_year(year: i32) -> bool {
    year % 400 == 0 || (year % 4 == 0 && year % 100 != 0)
}
#[cfg(test)]
mod tests {
    use super::*;
    use chrono::{NaiveDate, Timelike};
    use uuid::Uuid;
    // Builds a recurrence_rule Model with fixed timestamps for testing.
    fn make_rule(
        rule_type: RecurrenceType,
        interval: i32,
        by_weekday: Option<i32>,
        by_monthday: Option<i32>,
    ) -> recurrence_rule::Model {
        recurrence_rule::Model {
            id: Uuid::new_v4(),
            r#type: rule_type,
            interval,
            by_weekday,
            by_monthday,
            timezone: "Asia/Shanghai".to_string(),
            created_at: chrono::Utc::now().fixed_offset(),
            updated_at: chrono::Utc::now().fixed_offset(),
        }
    }
    // Builds a UTC (+00:00) DateTime<FixedOffset> for the given wall clock.
    fn make_datetime(year: i32, month: u32, day: u32, hour: u32, min: u32) -> DateTime<FixedOffset> {
        let naive = NaiveDate::from_ymd_opt(year, month, day)
            .unwrap()
            .and_hms_opt(hour, min, 0)
            .unwrap();
        FixedOffset::east_opt(0).unwrap().from_local_datetime(&naive).unwrap()
    }
    #[test]
    fn test_hourly() {
        let rule = make_rule(RecurrenceType::Hourly, 2, None, None);
        let current = make_datetime(2024, 1, 15, 10, 30);
        let next = calculate_next_due(&rule, current).unwrap();
        assert_eq!(next.hour(), 12);
        assert_eq!(next.minute(), 30);
    }
    #[test]
    fn test_daily() {
        let rule = make_rule(RecurrenceType::Daily, 3, None, None);
        let current = make_datetime(2024, 1, 15, 10, 30);
        let next = calculate_next_due(&rule, current).unwrap();
        assert_eq!(next.day(), 18);
    }
    #[test]
    fn test_weekly() {
        let rule = make_rule(RecurrenceType::Weekly, 1, None, None);
        let current = make_datetime(2024, 1, 15, 10, 30);
        let next = calculate_next_due(&rule, current).unwrap();
        assert_eq!(next.day(), 22);
    }
    #[test]
    fn test_monthly() {
        let rule = make_rule(RecurrenceType::Monthly, 1, None, Some(15));
        let current = make_datetime(2024, 1, 15, 10, 30);
        let next = calculate_next_due(&rule, current).unwrap();
        assert_eq!(next.month(), 2);
        assert_eq!(next.day(), 15);
    }
    #[test]
    fn test_monthly_overflow() {
        // Jan 31 -> February has no 31st, so it must clamp to the 28th/29th
        let rule = make_rule(RecurrenceType::Monthly, 1, None, Some(31));
        let current = make_datetime(2024, 1, 31, 10, 30);
        let next = calculate_next_due(&rule, current).unwrap();
        assert_eq!(next.month(), 2);
        assert_eq!(next.day(), 29); // 2024 is a leap year
    }
    #[test]
    fn test_yearly() {
        let rule = make_rule(RecurrenceType::Yearly, 1, None, None);
        let current = make_datetime(2024, 6, 15, 10, 30);
        let next = calculate_next_due(&rule, current).unwrap();
        assert_eq!(next.year(), 2025);
        assert_eq!(next.month(), 6);
        assert_eq!(next.day(), 15);
    }
}

View File

@@ -0,0 +1,395 @@
use std::collections::HashMap;
use std::sync::Arc;
use tokio::sync::RwLock;
use uuid::Uuid;
/// A scheduled timer entry: fires the given notification at `trigger_at`.
#[derive(Clone, Debug)]
pub struct TimerTask {
    pub id: Uuid,
    pub notification_id: Uuid,
    pub trigger_at: i64, // Unix timestamp in seconds
}
/// One ring of the hierarchical timer: a fixed number of slots, each
/// holding the tasks due during that slot's time span, plus a "hand"
/// (`current`) pointing at the active slot.
struct WheelLevel {
    slots: Vec<Vec<TimerTask>>,
    current: usize,
    slot_count: usize,
    interval_secs: i64, // seconds covered by a single slot
}
impl WheelLevel {
    fn new(slot_count: usize, interval_secs: i64) -> Self {
        Self {
            slots: vec![Vec::new(); slot_count],
            current: 0,
            slot_count,
            interval_secs,
        }
    }
    /// Appends `task` to the slot at index `slot`.
    fn add_to_slot(&mut self, slot: usize, task: TimerTask) {
        self.slots[slot].push(task);
    }
    /// Drains and returns every task in the slot the hand points at.
    fn take_current(&mut self) -> Vec<TimerTask> {
        std::mem::replace(&mut self.slots[self.current], Vec::new())
    }
    /// Steps the hand one slot forward; true when it wrapped back to 0.
    fn advance(&mut self) -> bool {
        self.current += 1;
        if self.current == self.slot_count {
            self.current = 0;
        }
        self.current == 0
    }
}
/// Hierarchical (multi-level) timing wheel.
///
/// Layout:
/// - Level 0: seconds wheel (60 slots, 1s per slot)
/// - Level 1: minutes wheel (60 slots, 60s per slot)
/// - Level 2: hours wheel (24 slots, 3600s per slot)
/// - Level 3: days wheel (30 slots, 86400s per slot)
pub struct TimeWheel {
    levels: Vec<WheelLevel>,
    current_time: i64, // wheel's internal clock, unix seconds
    task_index: HashMap<Uuid, Uuid>, // task_id -> notification_id; used for cancellation
}
impl TimeWheel {
    pub fn new() -> Self {
        let now = chrono::Utc::now().timestamp();
        Self {
            levels: vec![
                WheelLevel::new(60, 1),     // seconds wheel: 60 slots, 1s each
                WheelLevel::new(60, 60),    // minutes wheel: 60 slots, 60s each
                WheelLevel::new(24, 3600),  // hours wheel: 24 slots, 1h each
                WheelLevel::new(30, 86400), // days wheel: 30 slots, 1 day each
            ],
            current_time: now,
            task_index: HashMap::new(),
        }
    }
    /// Decides which (level, slot) a task belongs in given its trigger time.
    ///
    /// Wheel layout:
    /// - Level 0 (seconds): 60 slots, interval=1s, covers delays of 1-60s
    /// - Level 1 (minutes): 60 slots, interval=60s, covers 60s-1h
    /// - Level 2 (hours): 24 slots, interval=3600s, covers 1h-24h
    /// - Level 3 (days): 30 slots, interval=86400s, covers 1d-30d
    ///
    /// Cascading: whenever level N wraps around, the tasks in level N+1's
    /// current slot are taken out and re-inserted at a lower level.
    fn calculate_slot(&self, trigger_at: i64) -> Option<(usize, usize)> {
        let delay = trigger_at - self.current_time;
        if delay <= 0 {
            // Already due. Use the NEXT slot of the seconds wheel, not the
            // current one: `tick()` advances the hand *before* draining, so
            // a task parked in the current slot would be skipped and only
            // fire after a full revolution (~60 seconds late).
            return Some((0, (self.levels[0].current + 1) % self.levels[0].slot_count));
        }
        // Cumulative delay range covered up to and including each level.
        let mut level_ranges: Vec<i64> = Vec::new();
        let mut cumulative = 0i64;
        for level in &self.levels {
            cumulative += level.slot_count as i64 * level.interval_secs;
            level_ranges.push(cumulative);
        }
        // Pick the first level whose range can hold this delay.
        let mut prev_range = 0i64;
        for (level_idx, &range) in level_ranges.iter().enumerate() {
            if delay <= range {
                let level = &self.levels[level_idx];
                // Slot offset from the level's current hand position.
                // Seconds wheel: delay=5 -> 5 slots ahead of the hand.
                // Higher wheels: choose the slot whose cascade moment comes
                // just before the task is due, so the remainder is handled
                // by the lower levels. The delay falls in (prev_range,
                // range]; counting from the hand, slot[k] covers
                // (prev_range + k*interval, prev_range + (k+1)*interval],
                // hence slots_away = (delay - prev_range - 1) / interval.
                let slots_away = if level_idx == 0 {
                    delay as usize
                } else {
                    ((delay - prev_range - 1) / level.interval_secs) as usize
                };
                let target_slot = (level.current + slots_away) % level.slot_count;
                return Some((level_idx, target_slot));
            }
            prev_range = range;
        }
        // Beyond the wheel's horizon: park in the top level's last slot;
        // the task is re-binned on every cascade until it fits.
        let last_level = self.levels.len() - 1;
        Some((
            last_level,
            (self.levels[last_level].current + self.levels[last_level].slot_count - 1)
                % self.levels[last_level].slot_count,
        ))
    }
    /// Inserts a timer task; returns whether a slot was assigned.
    pub fn add_task(&mut self, task: TimerTask) -> bool {
        if let Some((level, slot)) = self.calculate_slot(task.trigger_at) {
            self.task_index.insert(task.id, task.notification_id);
            self.levels[level].add_to_slot(slot, task);
            true
        } else {
            false
        }
    }
    /// Cancels a task. Removal is lazy: the entry stays in its slot and is
    /// filtered out when the slot is drained.
    pub fn cancel_task(&mut self, task_id: Uuid) -> bool {
        self.task_index.remove(&task_id).is_some()
    }
    /// A task is still live iff its id is present in the index.
    fn is_task_valid(&self, task: &TimerTask) -> bool {
        self.task_index.contains_key(&task.id)
    }
    /// Advances the wheel by one second.
    /// Returns the tasks that became due on this tick.
    pub fn tick(&mut self) -> Vec<TimerTask> {
        // Move the seconds hand first.
        let cascade = self.levels[0].advance();
        self.current_time += 1;
        let mut ready_tasks = Vec::new();
        // Cascade the higher wheels before draining the current slot, so
        // freshly demoted tasks landing in it are not missed.
        if cascade {
            self.cascade_from_level(1);
        }
        // Drain the seconds wheel's current slot.
        let tasks = self.levels[0].take_current();
        for task in tasks {
            if self.is_task_valid(&task) {
                if task.trigger_at <= self.current_time {
                    // Due: drop it from the index and hand it to the caller.
                    self.task_index.remove(&task.id);
                    ready_tasks.push(task);
                } else {
                    // Not yet due (shouldn't normally happen): re-bin it.
                    if let Some((level, slot)) = self.calculate_slot(task.trigger_at) {
                        self.levels[level].add_to_slot(slot, task);
                    }
                }
            }
        }
        ready_tasks
    }
    /// Cascades tasks downward starting at `start_level`.
    fn cascade_from_level(&mut self, start_level: usize) {
        if start_level >= self.levels.len() {
            return;
        }
        // Take the higher wheel's current slot and re-bin its tasks lower.
        let tasks = self.levels[start_level].take_current();
        for task in tasks {
            if self.is_task_valid(&task) {
                if let Some((new_level, slot)) = self.calculate_slot(task.trigger_at) {
                    self.levels[new_level].add_to_slot(slot, task);
                }
            }
        }
        // Advance the higher wheel; keep cascading if it wrapped too.
        let cascade = self.levels[start_level].advance();
        if cascade {
            self.cascade_from_level(start_level + 1);
        }
    }
    /// Snapshot of task counts per level, for diagnostics.
    pub fn stats(&self) -> TimeWheelStats {
        let mut total_tasks = 0;
        let mut level_counts = Vec::new();
        for level in &self.levels {
            let count: usize = level.slots.iter().map(|s| s.len()).sum();
            level_counts.push(count);
            total_tasks += count;
        }
        TimeWheelStats {
            total_tasks,
            level_counts,
            current_time: self.current_time,
        }
    }
}
impl Default for TimeWheel {
    // A default wheel is simply an empty one anchored at "now".
    fn default() -> Self {
        Self::new()
    }
}
/// Diagnostic snapshot of the wheel: total queued tasks, per-level task
/// counts, and the wheel's internal clock (unix seconds).
#[derive(Debug, Clone)]
pub struct TimeWheelStats {
    pub total_tasks: usize,
    pub level_counts: Vec<usize>,
    pub current_time: i64,
}
/// Thread-safe handle to a `TimeWheel`; clones share the same wheel behind
/// an async read-write lock.
pub struct SharedTimeWheel {
    inner: Arc<RwLock<TimeWheel>>,
}
impl SharedTimeWheel {
    pub fn new() -> Self {
        SharedTimeWheel {
            inner: Arc::new(RwLock::new(TimeWheel::new())),
        }
    }
    /// Schedules a task on the shared wheel.
    pub async fn add_task(&self, task: TimerTask) -> bool {
        self.inner.write().await.add_task(task)
    }
    /// Cancels a previously scheduled task by its timer id.
    pub async fn cancel_task(&self, task_id: Uuid) -> bool {
        self.inner.write().await.cancel_task(task_id)
    }
    /// Advances the wheel one second and returns the tasks now due.
    pub async fn tick(&self) -> Vec<TimerTask> {
        self.inner.write().await.tick()
    }
    /// Reads task-count statistics under the shared (read) lock.
    pub async fn stats(&self) -> TimeWheelStats {
        self.inner.read().await.stats()
    }
}
impl Default for SharedTimeWheel {
    // Default is a fresh, empty shared wheel.
    fn default() -> Self {
        Self::new()
    }
}
impl Clone for SharedTimeWheel {
fn clone(&self) -> Self {
Self {
inner: Arc::clone(&self.inner),
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_add_task_immediate() {
        let mut wheel = TimeWheel::new();
        let now = wheel.current_time;
        let task = TimerTask {
            id: Uuid::new_v4(),
            notification_id: Uuid::new_v4(),
            trigger_at: now + 5, // due in 5 seconds
        };
        assert!(wheel.add_task(task));
        assert_eq!(wheel.stats().total_tasks, 1);
    }
    #[test]
    fn test_tick_executes_task() {
        let mut wheel = TimeWheel::new();
        let now = wheel.current_time;
        let task = TimerTask {
            id: Uuid::new_v4(),
            notification_id: Uuid::new_v4(),
            trigger_at: now + 1, // due in 1 second
        };
        wheel.add_task(task.clone());
        // Advance one second
        let tasks = wheel.tick();
        assert_eq!(tasks.len(), 1);
        assert_eq!(tasks[0].notification_id, task.notification_id);
    }
    #[test]
    fn test_cascade_from_minute_level() {
        let mut wheel = TimeWheel::new();
        let now = wheel.current_time;
        let task = TimerTask {
            id: Uuid::new_v4(),
            notification_id: Uuid::new_v4(),
            trigger_at: now + 65, // 65s out: should land in the minute wheel
        };
        wheel.add_task(task.clone());
        // The minute wheel (or seconds wheel) should now hold the task
        assert!(wheel.stats().level_counts[1] > 0 || wheel.stats().level_counts[0] > 0);
        // Advance past 65 seconds
        let mut executed = Vec::new();
        for _ in 0..66 {
            executed.extend(wheel.tick());
        }
        assert_eq!(executed.len(), 1);
        assert_eq!(executed[0].notification_id, task.notification_id);
    }
    #[test]
    fn test_cancel_task() {
        let mut wheel = TimeWheel::new();
        let now = wheel.current_time;
        let task = TimerTask {
            id: Uuid::new_v4(),
            notification_id: Uuid::new_v4(),
            trigger_at: now + 5,
        };
        wheel.add_task(task.clone());
        assert!(wheel.cancel_task(task.id));
        // Advance past the due time: the cancelled task must not fire
        let mut executed = Vec::new();
        for _ in 0..6 {
            executed.extend(wheel.tick());
        }
        assert!(executed.is_empty());
    }
}

View File

@@ -0,0 +1,842 @@
use sea_orm::{
ActiveModelTrait, ColumnTrait, DatabaseConnection, EntityTrait, PaginatorTrait, QueryFilter,
QueryOrder, Set,
};
use serde_json::json;
use std::sync::Arc;
use tokio::sync::mpsc;
use tokio::time::{Duration, interval};
use tracing::{error, info, warn};
use uuid::Uuid;
use crate::entity::sea_orm_active_enums::{ChannelType, NotificationStatus, TargetType};
use crate::entity::{
delivery_log, notification, recurrence_rule, reminder_offset, reminder_task, todo, user,
};
use crate::timer::recurrence::calculate_next_due;
use crate::timer::{SharedTimeWheel, TimerTask};
/// Retry back-off schedule in milliseconds: 1 min, 5 min, 15 min, 1 h, 4 h.
const BACKOFF_STEPS_MS: [i64; 5] = [60_000, 300_000, 900_000, 3_600_000, 14_400_000];
/// Maximum retry count.
const MAX_RETRY_ATTEMPTS: i32 = 5;
/// Lock timeout (5 minutes).
const LOCK_TIMEOUT_MS: i64 = 5 * 60 * 1000;
/// Back-off delay (ms) before retry `attempt_no` (1-based). Attempts past
/// the end of the table keep reusing the last (largest) step; attempt
/// numbers below 1 use the first step.
fn calc_backoff_ms(attempt_no: i32) -> i64 {
    let last_index = BACKOFF_STEPS_MS.len() as i32 - 1;
    BACKOFF_STEPS_MS[(attempt_no - 1).clamp(0, last_index) as usize]
}
/// Fallback reminder-offset configuration: fire at the due time itself
/// (offset 0), via the in-app channel only.
struct DefaultOffset {
    offset_minutes: i32,
    channel_inapp: bool,
    channel_bark: bool,
}
impl Default for DefaultOffset {
    fn default() -> Self {
        DefaultOffset {
            channel_inapp: true,
            channel_bark: false,
            offset_minutes: 0,
        }
    }
}
/// Notification delivery worker: drives the shared time wheel, periodically
/// loads pending notifications from the database, and holds the HTTP client
/// used for Bark pushes.
pub struct NotificationWorker {
    db: DatabaseConnection,
    time_wheel: SharedTimeWheel,
    bark_client: reqwest::Client,
}
impl NotificationWorker {
pub fn new(db: DatabaseConnection, time_wheel: SharedTimeWheel) -> Self {
Self {
db,
time_wheel,
bark_client: reqwest::Client::new(),
}
}
/// Start the worker's background tasks and return a channel for commands.
///
/// Consumes `self`, wraps it in an `Arc`, and spawns three tokio tasks:
/// 1. a 1-second ticker that drains due tasks from the time wheel and
///    executes each notification;
/// 2. a 60-second ticker that reloads upcoming pending notifications from
///    the database into the time wheel;
/// 3. a command loop that services `WorkerCommand`s received on the
///    returned `mpsc::Sender` (bounded to 1000 queued commands).
pub async fn start(self) -> mpsc::Sender<WorkerCommand> {
    let (tx, mut rx) = mpsc::channel::<WorkerCommand>(1000);
    let worker = Arc::new(self);
    // Time-wheel tick task: fire due notifications every second.
    let tick_worker = Arc::clone(&worker);
    tokio::spawn(async move {
        let mut ticker = interval(Duration::from_secs(1));
        loop {
            ticker.tick().await;
            let tasks = tick_worker.time_wheel.tick().await;
            // Notifications are executed sequentially within a tick;
            // a failure is logged and does not stop the remaining tasks.
            for task in tasks {
                if let Err(e) = tick_worker.execute_notification(task.notification_id).await {
                    error!(
                        "Failed to execute notification {}: {}",
                        task.notification_id, e
                    );
                }
            }
        }
    });
    // Periodic DB loader: refill the time wheel with pending notifications.
    let load_worker = Arc::clone(&worker);
    tokio::spawn(async move {
        let mut ticker = interval(Duration::from_secs(60)); // check once per minute
        loop {
            ticker.tick().await;
            if let Err(e) = load_worker.load_pending_notifications().await {
                error!("Failed to load pending notifications: {}", e);
            }
        }
    });
    // Command loop: schedule, cancel, or (re)generate notifications.
    let cmd_worker = Arc::clone(&worker);
    tokio::spawn(async move {
        while let Some(cmd) = rx.recv().await {
            match cmd {
                WorkerCommand::ScheduleNotification {
                    notification_id,
                    trigger_at,
                } => {
                    let task = TimerTask {
                        id: Uuid::new_v4(),
                        notification_id,
                        trigger_at,
                    };
                    cmd_worker.time_wheel.add_task(task).await;
                }
                WorkerCommand::CancelNotification { notification_id } => {
                    // Mark the notification as cancelled in the database;
                    // the executor will then skip it when its timer fires.
                    if let Err(e) = cmd_worker.cancel_notification(notification_id).await {
                        error!("Failed to cancel notification {}: {}", notification_id, e);
                    }
                }
                WorkerCommand::GenerateNotifications {
                    target_type,
                    target_id,
                } => {
                    if let Err(e) = cmd_worker
                        .generate_notifications(target_type.clone(), target_id)
                        .await
                    {
                        error!(
                            "Failed to generate notifications for {:?}/{}: {}",
                            target_type, target_id, e
                        );
                    }
                }
            }
        }
    });
    tx
}
/// Load pending notifications from the database into the time wheel.
///
/// Picks up `Pending` notifications whose `trigger_at` is within the next
/// 25 hours and that are either unlocked or whose lock is older than
/// `LOCK_TIMEOUT_MS` (stale lock recovery).
///
/// NOTE(review): duplicate time-wheel entries for the same notification are
/// possible across runs of this loader; `execute_notification` tolerates
/// that by skipping non-`Pending` rows.
/// NOTE(review): only `Pending` rows are reloaded — a row left in `Queued`
/// by a crash mid-delivery is never picked up again; verify whether that is
/// intended.
async fn load_pending_notifications(
    &self,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let now = chrono::Utc::now().fixed_offset();
    let future_limit = now + chrono::Duration::hours(25); // load the next 25 hours
    let expired_lock = now - chrono::Duration::milliseconds(LOCK_TIMEOUT_MS);
    // Pending notifications, including those whose lock has expired.
    let notifications = notification::Entity::find()
        .filter(notification::Column::Status.eq(NotificationStatus::Pending))
        .filter(notification::Column::TriggerAt.lte(future_limit))
        .filter(
            sea_orm::Condition::any()
                .add(notification::Column::LockedAt.is_null())
                .add(notification::Column::LockedAt.lt(expired_lock)),
        )
        .order_by_asc(notification::Column::TriggerAt)
        .all(&self.db)
        .await?;
    info!(
        "Loading {} pending notifications into time wheel",
        notifications.len()
    );
    for notif in notifications {
        // Time wheel works on unix-epoch seconds.
        let trigger_at = notif.trigger_at.timestamp();
        let task = TimerTask {
            id: Uuid::new_v4(),
            notification_id: notif.id,
            trigger_at,
        };
        self.time_wheel.add_task(task).await;
    }
    Ok(())
}
/// Deliver one notification and record the outcome.
///
/// Flow: load the row, skip unless `Pending`, lock it (`Queued` +
/// `locked_at`), derive the attempt number from the latest delivery log,
/// send over the notification's channel, then transition to `Sent`, a
/// retry (`Pending` with a backed-off `trigger_at`), or `Failed`.
///
/// In-app delivery is a no-op send: marking the row `Sent` is what makes
/// it visible to the client's notification feed.
///
/// NOTE(review): the status check and the lock update are two separate
/// statements, not an atomic compare-and-set — two workers could both pass
/// the check; confirm single-worker deployment or add a conditional update.
/// NOTE(review): if the process dies between locking and the final update,
/// the row stays `Queued` and is not reloaded by
/// `load_pending_notifications` (which filters on `Pending`).
async fn execute_notification(
    &self,
    notification_id: Uuid,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Fetch the notification row.
    let notif = notification::Entity::find_by_id(notification_id)
        .one(&self.db)
        .await?
        .ok_or("Notification not found")?;
    // Only `Pending` rows are deliverable (cancelled/sent/stale wheel
    // entries are skipped here).
    if notif.status != NotificationStatus::Pending {
        warn!("Notification {} is not pending, skipping", notification_id);
        return Ok(());
    }
    let now = chrono::Utc::now().fixed_offset();
    // Lock the row to guard against duplicate processing.
    let mut active: notification::ActiveModel = notif.clone().into();
    active.status = Set(NotificationStatus::Queued);
    active.locked_at = Set(Some(now));
    active.updated_at = Set(now);
    active.update(&self.db).await?;
    // Attempt number = latest logged attempt + 1, or 1 for the first try.
    let last_log = delivery_log::Entity::find()
        .filter(delivery_log::Column::NotificationId.eq(notification_id))
        .order_by_desc(delivery_log::Column::AttemptNo)
        .one(&self.db)
        .await?;
    let attempt_no = last_log.map(|l| l.attempt_no + 1).unwrap_or(1);
    // Recipient settings drive channel eligibility (Bark URL, toggles).
    let recipient = user::Entity::find_by_id(notif.recipient_id)
        .one(&self.db)
        .await?
        .ok_or("Recipient not found")?;
    // Perform the channel-specific send; Err carries a short reason code
    // or upstream error message for the delivery log.
    let result: Result<(), String> = match notif.channel {
        ChannelType::Inapp => {
            // In-app: nothing to push; the status change below suffices.
            Ok(())
        }
        ChannelType::Bark => {
            if !recipient.bark_enabled {
                Err("bark_disabled".to_string())
            } else if let Some(bark_url) = &recipient.bark_url {
                // Optional per-offset overrides (custom title/body/level/icon).
                // A lookup error is treated the same as "no offset".
                let offset = if let Some(offset_id) = notif.offset_id {
                    reminder_offset::Entity::find_by_id(offset_id)
                        .one(&self.db)
                        .await
                        .ok()
                        .flatten()
                } else {
                    None
                };
                // Default title/body come from the target (todo/task) itself.
                match self.get_notification_content(&notif).await {
                    Ok((default_title, default_body)) => {
                        // Offset overrides win; otherwise fall back to defaults.
                        let title = offset
                            .as_ref()
                            .and_then(|o| o.bark_title.clone())
                            .unwrap_or(default_title);
                        let subtitle = offset.as_ref().and_then(|o| o.bark_subtitle.clone());
                        let body_or_markdown = offset
                            .as_ref()
                            .and_then(|o| o.bark_body_markdown.clone())
                            .or(Some(default_body));
                        let level = offset.as_ref().and_then(|o| o.bark_level.clone());
                        // Icon falls back to the recipient's avatar.
                        let icon = offset
                            .as_ref()
                            .and_then(|o| o.bark_icon.clone())
                            .or_else(|| recipient.avatar.clone());
                        tracing::info!("Sending Bark notification with icon: {:?}", icon);
                        self.send_bark_notification(
                            bark_url,
                            &title,
                            subtitle.as_deref(),
                            body_or_markdown.as_deref(),
                            level.as_deref(),
                            icon.as_deref(),
                            // Markdown mode only when the offset supplied a
                            // markdown body.
                            offset
                                .as_ref()
                                .and_then(|o| o.bark_body_markdown.as_ref())
                                .is_some(),
                        )
                        .await
                        .map_err(|e| e.to_string())
                    }
                    Err(e) => Err(e.to_string()),
                }
            } else {
                Err("no_bark_url".to_string())
            }
        }
    };
    // Values needed after `notif` is consumed by `into()` below.
    let target_type = notif.target_type.clone();
    let target_id = notif.target_id;
    let channel = notif.channel.clone();
    let original_trigger_at = notif.trigger_at;
    match &result {
        Ok(_) => {
            // Success: mark sent, release the lock, log, and possibly
            // advance a recurring target to its next cycle.
            let mut active: notification::ActiveModel = notif.into();
            active.status = Set(NotificationStatus::Sent);
            active.sent_at = Set(Some(now));
            active.locked_at = Set(None);
            active.updated_at = Set(now);
            active.update(&self.db).await?;
            self.create_delivery_log(
                notification_id,
                attempt_no,
                channel,
                NotificationStatus::Sent,
                None,
            )
            .await?;
            self.check_and_advance_recurrence(target_type, target_id)
                .await?;
        }
        Err(error_msg) => {
            // Failure: retry with backoff until MAX_RETRY_ATTEMPTS is hit.
            let should_retry = attempt_no < MAX_RETRY_ATTEMPTS;
            if should_retry {
                // Reschedule: back to Pending with a pushed-out trigger_at,
                // and re-arm the time wheel for the retry moment.
                let backoff_ms = calc_backoff_ms(attempt_no);
                let retry_at = now + chrono::Duration::milliseconds(backoff_ms);
                let mut active: notification::ActiveModel = notif.into();
                active.status = Set(NotificationStatus::Pending);
                active.trigger_at = Set(retry_at);
                active.locked_at = Set(None);
                active.updated_at = Set(now);
                active.update(&self.db).await?;
                let task = TimerTask {
                    id: Uuid::new_v4(),
                    notification_id,
                    trigger_at: retry_at.timestamp(),
                };
                self.time_wheel.add_task(task).await;
                info!(
                    "Notification {} scheduled for retry at {} (attempt {})",
                    notification_id, retry_at, attempt_no
                );
                // Log the failed attempt (status stays Pending for retry).
                self.create_delivery_log(
                    notification_id,
                    attempt_no,
                    channel,
                    NotificationStatus::Pending,
                    Some(json!({ "message": error_msg })),
                )
                .await?;
            } else {
                // Retries exhausted: mark failed, restoring the original
                // trigger_at (it may have been shifted by earlier retries).
                let mut active: notification::ActiveModel = notif.into();
                active.status = Set(NotificationStatus::Failed);
                active.trigger_at = Set(original_trigger_at); // restore original trigger_at
                active.locked_at = Set(None);
                active.updated_at = Set(now);
                active.update(&self.db).await?;
                error!(
                    "Notification {} failed after {} attempts: {}",
                    notification_id, attempt_no, error_msg
                );
                self.create_delivery_log(
                    notification_id,
                    attempt_no,
                    channel,
                    NotificationStatus::Failed,
                    Some(json!({ "message": error_msg })),
                )
                .await?;
            }
        }
    }
    Ok(())
}
/// Insert a delivery-log row recording one attempt for a notification.
///
/// `response_meta` carries optional channel response details (e.g. an
/// error message) as JSON; `created_at` is stamped with the current time.
async fn create_delivery_log(
    &self,
    notification_id: Uuid,
    attempt_no: i32,
    channel: ChannelType,
    status: NotificationStatus,
    response_meta: Option<serde_json::Value>,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let entry = delivery_log::ActiveModel {
        id: Set(Uuid::new_v4()),
        notification_id: Set(notification_id),
        attempt_no: Set(attempt_no),
        channel: Set(channel),
        status: Set(status),
        response_meta: Set(response_meta),
        created_at: Set(chrono::Utc::now().fixed_offset()),
    };
    entry.insert(&self.db).await?;
    Ok(())
}
/// Advance a recurring target to its next cycle once the current cycle is
/// fully delivered.
///
/// If the target still has any `Pending` notifications, the cycle is not
/// finished and nothing happens. Otherwise dispatch to the per-target
/// advancement routine (which is a no-op for non-recurring targets).
async fn check_and_advance_recurrence(
    &self,
    target_type: TargetType,
    target_id: Uuid,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Count remaining pending notifications for this target.
    let pending_count = notification::Entity::find()
        .filter(notification::Column::TargetType.eq(target_type.clone()))
        .filter(notification::Column::TargetId.eq(target_id))
        .filter(notification::Column::Status.eq(NotificationStatus::Pending))
        .count(&self.db)
        .await?;
    if pending_count > 0 {
        // Current cycle still has undelivered notifications.
        return Ok(());
    }
    // All delivered: advance according to the target's recurrence rule.
    match target_type {
        TargetType::Todo => {
            self.advance_todo_recurrence(target_id).await?;
        }
        TargetType::ReminderTask => {
            self.advance_reminder_task_recurrence(target_id).await?;
        }
    }
    Ok(())
}
/// Advance a recurring Todo to its next cycle.
///
/// Silently returns if the todo, its recurrence rule, or the next due time
/// is unavailable (deleted rows or a rule that yields no next occurrence).
/// On success: updates `due_at`, resets the check-in flag, and regenerates
/// the next cycle's notifications.
async fn advance_todo_recurrence(
    &self,
    todo_id: Uuid,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let todo_item = todo::Entity::find_by_id(todo_id).one(&self.db).await?;
    let todo_item = match todo_item {
        Some(t) => t,
        None => return Ok(()), // todo was deleted
    };
    // Non-recurring todos have no rule and need no advancement.
    let rule_id = match todo_item.recurrence_rule_id {
        Some(id) => id,
        None => return Ok(()), // not a recurring todo
    };
    let rule = recurrence_rule::Entity::find_by_id(rule_id)
        .one(&self.db)
        .await?;
    let rule = match rule {
        Some(r) => r,
        None => return Ok(()), // rule was deleted
    };
    // Compute the next due time from the rule and the current due_at.
    let next_due = match calculate_next_due(&rule, todo_item.due_at) {
        Some(d) => d,
        None => {
            warn!("Failed to calculate next due for todo {}", todo_id);
            return Ok(());
        }
    };
    info!(
        "Advancing todo {} from {} to {}",
        todo_id, todo_item.due_at, next_due
    );
    // Move due_at forward and clear the check-in flag for the new cycle.
    let mut active: todo::ActiveModel = todo_item.into();
    active.due_at = Set(next_due);
    active.is_checked_in = Set(false); // reopen check-in for the next cycle
    active.updated_at = Set(chrono::Utc::now().fixed_offset());
    active.update(&self.db).await?;
    // Generate notifications for the new cycle.
    self.generate_notifications(TargetType::Todo, todo_id)
        .await?;
    Ok(())
}
/// Advance a recurring ReminderTask to its next cycle.
///
/// Mirrors `advance_todo_recurrence` minus the check-in reset (reminder
/// tasks have no check-in state). Silently returns when the task, its
/// rule, or the next due time is unavailable.
async fn advance_reminder_task_recurrence(
    &self,
    task_id: Uuid,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let task = reminder_task::Entity::find_by_id(task_id)
        .one(&self.db)
        .await?;
    let task = match task {
        Some(t) => t,
        None => return Ok(()), // task was deleted
    };
    // Non-recurring tasks have no rule and need no advancement.
    let rule_id = match task.recurrence_rule_id {
        Some(id) => id,
        None => return Ok(()), // not a recurring task
    };
    let rule = recurrence_rule::Entity::find_by_id(rule_id)
        .one(&self.db)
        .await?;
    let rule = match rule {
        Some(r) => r,
        None => return Ok(()), // rule was deleted
    };
    // Compute the next due time from the rule and the current due_at.
    let next_due = match calculate_next_due(&rule, task.due_at) {
        Some(d) => d,
        None => {
            warn!("Failed to calculate next due for reminder_task {}", task_id);
            return Ok(());
        }
    };
    info!(
        "Advancing reminder_task {} from {} to {}",
        task_id, task.due_at, next_due
    );
    // Move due_at forward for the next cycle.
    let mut active: reminder_task::ActiveModel = task.into();
    active.due_at = Set(next_due);
    active.updated_at = Set(chrono::Utc::now().fixed_offset());
    active.update(&self.db).await?;
    // Generate notifications for the new cycle.
    self.generate_notifications(TargetType::ReminderTask, task_id)
        .await?;
    Ok(())
}
/// Resolve the default (title, body) pair for a notification from its
/// target row: the todo/task title and its description (empty string when
/// the description is absent).
///
/// Errors when the target row no longer exists.
async fn get_notification_content(
    &self,
    notif: &notification::Model,
) -> Result<(String, String), Box<dyn std::error::Error + Send + Sync>> {
    match notif.target_type {
        TargetType::Todo => {
            let todo = todo::Entity::find_by_id(notif.target_id)
                .one(&self.db)
                .await?
                .ok_or("Todo not found")?;
            Ok((todo.title, todo.description.unwrap_or_default()))
        }
        TargetType::ReminderTask => {
            let task = reminder_task::Entity::find_by_id(notif.target_id)
                .one(&self.db)
                .await?
                .ok_or("ReminderTask not found")?;
            Ok((task.title, task.description.unwrap_or_default()))
        }
    }
}
/// POST a push payload to a Bark server.
///
/// Builds a JSON body with `title` and a fixed `group` of "notify", then
/// attaches the optional fields that were provided: `subtitle`, the content
/// (under `markdown` when `is_markdown` is set, otherwise under `body`),
/// `level`, and `icon`. Fails on any non-2xx response, surfacing the status
/// and response text in the error.
async fn send_bark_notification(
    &self,
    bark_url: &str,
    title: &str,
    subtitle: Option<&str>,
    body: Option<&str>,
    level: Option<&str>,
    icon: Option<&str>,
    is_markdown: bool,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Mandatory fields.
    let mut payload = json!({
        "title": title,
        "group": "notify",
    });
    // Optional subtitle.
    if let Some(sub) = subtitle {
        payload["subtitle"] = json!(sub);
    }
    // Content goes under "markdown" or "body" depending on format.
    let content_key = if is_markdown { "markdown" } else { "body" };
    if let Some(content) = body {
        payload[content_key] = json!(content);
    }
    // Optional push level.
    if let Some(lvl) = level {
        payload["level"] = json!(lvl);
    }
    // Optional icon URL.
    if let Some(ic) = icon {
        payload["icon"] = json!(ic);
    }
    let response = self
        .bark_client
        .post(bark_url)
        .header("Content-Type", "application/json")
        .json(&payload)
        .timeout(Duration::from_secs(10))
        .send()
        .await?;
    if !response.status().is_success() {
        let status = response.status();
        let text = response.text().await.unwrap_or_default();
        return Err(format!("Bark API error: {} - {}", status, text).into());
    }
    info!("Bark notification sent successfully");
    Ok(())
}
/// Cancel a pending notification so it will not be delivered.
///
/// Only `Pending` rows are touched; anything already sent, queued, or
/// failed is left alone. The time wheel is not purged — the executor skips
/// the entry when it fires because the status is no longer `Pending`.
///
/// NOTE(review): there is no dedicated "cancelled" status in
/// `NotificationStatus` here, so a cancelled notification is stored as
/// `Failed` — confirm this is intended, as it is indistinguishable from a
/// delivery failure.
async fn cancel_notification(
    &self,
    notification_id: Uuid,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    let notif = notification::Entity::find_by_id(notification_id)
        .one(&self.db)
        .await?;
    if let Some(notif) = notif {
        if notif.status == NotificationStatus::Pending {
            let mut active: notification::ActiveModel = notif.into();
            active.status = Set(NotificationStatus::Failed);
            active.updated_at = Set(chrono::Utc::now().fixed_offset());
            active.update(&self.db).await?;
        }
    }
    Ok(())
}
/// (Re)generate all pending notifications for a target.
///
/// Called when a Todo/ReminderTask is created or updated, and when a
/// recurring target advances to its next cycle. Existing `Pending`
/// notifications for the target are deleted first, then one notification
/// per (recipient × offset × enabled channel) is created with
/// `trigger_at = due_at - offset_minutes`, and each is armed on the time
/// wheel. Offsets whose trigger time is already in the past are skipped.
///
/// NOTE(review): deleted pending rows may still have stale time-wheel
/// entries; the executor skips them since the row is gone or non-pending.
pub async fn generate_notifications(
    &self,
    target_type: TargetType,
    target_id: Uuid,
) -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
    // Drop stale pending notifications before regenerating.
    notification::Entity::delete_many()
        .filter(notification::Column::TargetType.eq(target_type.clone()))
        .filter(notification::Column::TargetId.eq(target_id))
        .filter(notification::Column::Status.eq(NotificationStatus::Pending))
        .exec(&self.db)
        .await?;
    // Resolve the target's due time and its recipient list.
    let (due_at, recipient_ids) = match target_type {
        TargetType::Todo => {
            // A todo has exactly one recipient: its owner.
            let todo = todo::Entity::find_by_id(target_id)
                .one(&self.db)
                .await?
                .ok_or("Todo not found")?;
            (todo.due_at, vec![todo.owner_id])
        }
        TargetType::ReminderTask => {
            let task = reminder_task::Entity::find_by_id(target_id)
                .one(&self.db)
                .await?
                .ok_or("ReminderTask not found")?;
            // A reminder task fans out to all of its recipients.
            use crate::entity::reminder_task_recipient;
            let recipients = reminder_task_recipient::Entity::find()
                .filter(reminder_task_recipient::Column::TaskId.eq(target_id))
                .all(&self.db)
                .await?;
            let recipient_ids: Vec<Uuid> = recipients.into_iter().map(|r| r.user_id).collect();
            (task.due_at, recipient_ids)
        }
    };
    // Load the target's configured reminder offsets.
    let db_offsets = reminder_offset::Entity::find()
        .filter(reminder_offset::Column::TargetType.eq(target_type.clone()))
        .filter(reminder_offset::Column::TargetId.eq(target_id))
        .all(&self.db)
        .await?;
    // Fall back to a single default offset when none are configured.
    // Tuple layout: (offset_minutes, channel_inapp, channel_bark, offset_id)
    let offsets: Vec<(i32, bool, bool, Option<Uuid>)> = if db_offsets.is_empty() {
        let default = DefaultOffset::default();
        vec![(
            default.offset_minutes,
            default.channel_inapp,
            default.channel_bark,
            None, // default offset has no database row / id
        )]
    } else {
        db_offsets
            .iter()
            .map(|o| {
                (
                    o.offset_minutes,
                    o.channel_inapp,
                    o.channel_bark,
                    Some(o.id),
                )
            })
            .collect()
    };
    let now = chrono::Utc::now().fixed_offset();
    // One notification per recipient per offset per enabled channel.
    for recipient_id in &recipient_ids {
        // Per-recipient channel preferences gate notification creation.
        let user = user::Entity::find_by_id(*recipient_id)
            .one(&self.db)
            .await?;
        let (inapp_enabled, bark_enabled, has_bark_url) = match &user {
            Some(u) => (u.inapp_enabled, u.bark_enabled, u.bark_url.is_some()),
            None => continue, // user no longer exists, skip
        };
        for (offset_minutes, channel_inapp, channel_bark, offset_id) in &offsets {
            let trigger_at = due_at - chrono::Duration::minutes(*offset_minutes as i64);
            // Skip triggers that are already in the past.
            if trigger_at <= now {
                continue;
            }
            // In-app notification (offset requests it AND user enabled it).
            if *channel_inapp && inapp_enabled {
                let notif = notification::ActiveModel {
                    id: Set(Uuid::new_v4()),
                    recipient_id: Set(*recipient_id),
                    target_type: Set(target_type.clone()),
                    target_id: Set(target_id),
                    trigger_at: Set(trigger_at),
                    channel: Set(ChannelType::Inapp),
                    status: Set(NotificationStatus::Pending),
                    locked_at: Set(None),
                    sent_at: Set(None),
                    read_at: Set(None),
                    created_at: Set(now),
                    updated_at: Set(now),
                    offset_id: Set(*offset_id),
                };
                // Insert failures are swallowed: no wheel entry is armed
                // for a row that was never created.
                if let Ok(created) = notif.insert(&self.db).await {
                    // Arm the time wheel for this notification.
                    let task = TimerTask {
                        id: Uuid::new_v4(),
                        notification_id: created.id,
                        trigger_at: trigger_at.timestamp(),
                    };
                    self.time_wheel.add_task(task).await;
                }
            }
            // Bark notification (requested, enabled, and URL configured).
            if *channel_bark && bark_enabled && has_bark_url {
                let notif = notification::ActiveModel {
                    id: Set(Uuid::new_v4()),
                    recipient_id: Set(*recipient_id),
                    target_type: Set(target_type.clone()),
                    target_id: Set(target_id),
                    trigger_at: Set(trigger_at),
                    channel: Set(ChannelType::Bark),
                    status: Set(NotificationStatus::Pending),
                    locked_at: Set(None),
                    sent_at: Set(None),
                    read_at: Set(None),
                    created_at: Set(now),
                    updated_at: Set(now),
                    offset_id: Set(*offset_id),
                };
                if let Ok(created) = notif.insert(&self.db).await {
                    // Arm the time wheel for this notification.
                    let task = TimerTask {
                        id: Uuid::new_v4(),
                        notification_id: created.id,
                        trigger_at: trigger_at.timestamp(),
                    };
                    self.time_wheel.add_task(task).await;
                }
            }
        }
    }
    info!(
        "Generated notifications for {:?}/{}",
        target_type, target_id
    );
    Ok(())
}
}
/// Commands accepted by the `NotificationWorker` command loop
/// (sent over the channel returned by `NotificationWorker::start`).
#[derive(Debug)]
pub enum WorkerCommand {
    /// Arm a single notification on the time wheel; `trigger_at` is a
    /// unix timestamp in seconds.
    ScheduleNotification {
        notification_id: Uuid,
        trigger_at: i64,
    },
    /// Cancel a pending notification so it is skipped when its timer fires.
    CancelNotification { notification_id: Uuid },
    /// Regenerate all pending notifications for a Todo or ReminderTask.
    GenerateNotifications {
        target_type: TargetType,
        target_id: Uuid,
    },
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 13 KiB

60
docker-compose.prod.yml Normal file
View File

@@ -0,0 +1,60 @@
version: "3.8"
services:
postgres:
image: postgres:16-alpine
container_name: notify-postgres
restart: unless-stopped
environment:
POSTGRES_USER: ${POSTGRES_USER:-notify}
POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:?POSTGRES_PASSWORD is required}
POSTGRES_DB: ${POSTGRES_DB:-notify}
volumes:
- postgres_data:/var/lib/postgresql/data
# 生产环境不对外暴露数据库端口
expose:
- "5432"
healthcheck:
test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-notify} -d ${POSTGRES_DB:-notify}"]
interval: 5s
timeout: 5s
retries: 5
backend:
build:
context: ./backend_rust
dockerfile: Dockerfile
container_name: notify-backend
restart: unless-stopped
depends_on:
postgres:
condition: service_healthy
environment:
DATABASE_URL: postgresql://${POSTGRES_USER:-notify}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB:-notify}
JWT_SECRET: ${JWT_SECRET:?JWT_SECRET is required}
BASE_URL: ${BASE_URL:-https://notify.example.com}
ports:
- "127.0.0.1:4000:4000"
volumes:
- uploads_data:/app/uploads
frontend:
build:
context: ./frontend
dockerfile: Dockerfile
args:
# 构建时注入 API URL用于 Next.js 静态优化)
NEXT_PUBLIC_API_URL: ${NEXT_PUBLIC_API_URL:-}
container_name: notify-frontend
restart: unless-stopped
depends_on:
- backend
environment:
# 运行时 API URL用于 SSR
NEXT_PUBLIC_API_URL: ${NEXT_PUBLIC_API_URL:-}
ports:
- "127.0.0.1:3000:3000"
volumes:
postgres_data:
uploads_data:

54
docker-compose.yml Normal file
View File

@@ -0,0 +1,54 @@
version: "3.8"
services:
postgres:
image: postgres:16-alpine
container_name: notify-postgres
restart: unless-stopped
environment:
POSTGRES_USER: notify
POSTGRES_PASSWORD: notify_secret
POSTGRES_DB: notify
volumes:
- postgres_data:/var/lib/postgresql/data
ports:
- "5432:5432"
healthcheck:
test: ["CMD-SHELL", "pg_isready -U notify -d notify"]
interval: 5s
timeout: 5s
retries: 5
backend:
build:
context: ./backend_rust
dockerfile: Dockerfile
container_name: notify-backend
restart: unless-stopped
depends_on:
postgres:
condition: service_healthy
environment:
DATABASE_URL: postgresql://notify:notify_secret@postgres:5432/notify
JWT_SECRET: ${JWT_SECRET:-change_me_in_production}
ports:
- "4000:4000"
volumes:
- uploads_data:/app/uploads
frontend:
build:
context: ./frontend
dockerfile: Dockerfile
container_name: notify-frontend
restart: unless-stopped
depends_on:
- backend
environment:
NEXT_PUBLIC_API_URL: http://backend:4000
ports:
- "3000:3000"
volumes:
postgres_data:
uploads_data:

151
docs/spec.md Normal file
View File

@@ -0,0 +1,151 @@
# Notify 产品与技术规格
## 目标与定位
- **目标**:打造简洁明了、同时适配桌面与移动浏览器的提醒应用。
- **核心能力**Todo 与多用户提醒、周期性规则、提前提醒、网页通知 + Bark 推送。
## 需求澄清后的产品规格
### 核心对象
- **Todo**:个人待办,支持单次与周期性,配置一个或多个提醒策略。
- **提醒任务**:可指定多个接收者(含自己),支持单次与周期性,提醒策略与触达方式同 Todo。
- **通知**:一次提醒事件在“某接收者”维度的投递记录,支持站内与 Bark 两种渠道。
- **邀请码**:仅邀请码注册,所有用户可生成/管理邀请码。
### 业务规则与边界条件
1. **周期性任务的下一次触发时间**
- 使用 **用户时区**(默认 `Asia/Shanghai` 可配置)。
- 规则类型:`hourly | daily | weekly | monthly | yearly`
- 计算方式:以“本次 dueAt”为基准按规则添加时间并做 **日历对齐**
- `hourly`: `dueAt + n hours`
- `daily`: `dueAt + n days`(保持时分)
- `weekly`: `dueAt + n weeks`(保持星期与时分)
- `monthly`: 若下月无对应日期,则取 **该月最后一天** 同时分
- `yearly`: 若跨闰年导致日期不存在,取 **当年同月最后一天** 同时分
2. **多提醒策略**
- 每个任务可配置多条提前量(如 10 分钟、1 小时)。
- 对每条提前量生成独立触发点:`triggerAt = dueAt - offsetMinutes`
3. **浏览器不在线**
- 服务端仍生成通知记录(状态 `pending/queued/sent`)。
- 用户上线后,客户端拉取未读站内通知并展示(通知中心 + 弹窗)。
4. **Bark 推送失败/重试**
失败进入重试队列指数退避1m/5m/15m/1h/4h
- 最多重试 5 次,超出后标记 `failed`
- 幂等:同一通知记录仅允许一次成功发送。
5. **多用户提醒**
- 每个接收者创建独立通知记录(便于去重与投递状态追踪)。
6. **邀请码策略(默认方案)**
- 每个邀请码 **可用次数 = 5**
- **有效期 = 7 天**,可撤销。
- 邀请码可由任意已注册用户生成与管理。
## 数据模型(关系型)
### 核心表
- `users`: 用户
- `invites`: 邀请码
- `todos`: 个人待办
- `reminder_tasks`: 多用户提醒任务
- `recurrence_rules`: 周期规则
- `reminder_offsets`: 提前提醒策略
- `notifications`: 通知实例(按接收者维度)
- `delivery_logs`: 投递日志(站内/Bark
### 字段示意(详见 schema
- `users(id, username, password_hash, timezone, bark_url, created_at)`
- `invites(id, code, creator_id, max_uses, used_count, expires_at, revoked_at)`
- `todos(id, owner_id, title, description, due_at, recurrence_rule_id)`
- `reminder_tasks(id, creator_id, title, due_at, recurrence_rule_id)`
- `reminder_task_recipients(task_id, user_id)`
- `recurrence_rules(id, type, interval, by_weekday, by_monthday, timezone)`
- `reminder_offsets(id, target_type, target_id, offset_minutes, channel_inapp, channel_bark)`
- `notifications(id, recipient_id, target_type, target_id, trigger_at, channel, status)`
- `delivery_logs(id, notification_id, attempt_no, channel, status, response_meta)`
## 核心接口设计REST
### 认证与邀请码
- `POST /api/auth/register` { username, password, inviteCode }
- `POST /api/auth/login` { username, password } -> JWT
- `POST /api/invites` 创建邀请码
- `GET /api/invites` 获取邀请码列表
- `POST /api/invites/:id/revoke`
### Todo
- `GET /api/todos`
- `POST /api/todos`
- `GET /api/todos/:id`
- `PUT /api/todos/:id`
- `DELETE /api/todos/:id`
### 多用户提醒任务
- `GET /api/reminder-tasks`
- `POST /api/reminder-tasks`
- `GET /api/reminder-tasks/:id`
- `PUT /api/reminder-tasks/:id`
- `DELETE /api/reminder-tasks/:id`
### 用户与设置
- `GET /api/users?query=`
- `GET /api/me`
- `PUT /api/me/settings` { timezone, barkUrl, inappEnabled, barkEnabled }
### 通知中心
- `GET /api/notifications?status=unread`
- `POST /api/notifications/:id/read`
## Bark 推送设计
### 调用形式
- 采用 Bark 官方接口:
- GET: `https://bark.server/push/{title}/{body}?group=notify&icon=...`
- POST: JSON body `{ title, body, group, icon, url, badge, sound }`
### 发送内容
- `title`: 任务标题
- `body`: 触发时间 + 备注
- `group`: `notify`
- 额外参数:`url` 指向站内通知详情
## 调度方案(可靠与幂等)
### 核心思想
-**通知表** 为唯一投递来源(幂等)。
- 调度器只负责生成通知实例;投递 worker 只发送 `pending` 通知。
### 通知状态机
- `pending` -> `queued` -> `sent``failed`
### 触发流程
1. 用户创建/更新任务 -> 生成 `next_due_at`
2. 生成通知实例:`trigger_at = due_at - offset`
3. Worker 扫描 `trigger_at <= now``status = pending`,锁定并投递
4. 成功则更新 `sent`,失败则记录 `failed` 并按策略重试
## 关键页面与交互
1. **登录/注册(邀请码)**
- 注册页要求邀请码与密码确认
2. **Todo 列表**
- 列表 + 新增/编辑弹窗
- 支持单次/周期选择、提前提醒策略
3. **提醒任务**
- 支持多接收者选择、搜索用户
4. **用户列表**
- 全部用户可见,支持搜索
5. **个人设置**
- Bark 链接、站内通知偏好
6. **通知中心**
- 未读/历史提醒,点击标记已读
## 工程结构与实现要点
- `backend/`: REST API、调度 worker、Bark 集成
- `frontend/`: Next.js UI响应式布局
- `docker-compose.yml`: 数据库与服务
## 示例伪代码
### 计算下一次触发
```
function nextDueAt(dueAt, rule) {
switch (rule.type) {
case "monthly":
return addMonthWithClamp(dueAt, rule.interval);
case "yearly":
return addYearWithClamp(dueAt, rule.interval);
// hourly/daily/weekly...
}
}
```

8
frontend/.dockerignore Normal file
View File

@@ -0,0 +1,8 @@
node_modules
.next
.env
.env.*
*.log
.git
.gitignore
README.md

1
frontend/.gitignore vendored Normal file
View File

@@ -0,0 +1 @@
.next

45
frontend/Dockerfile Normal file
View File

@@ -0,0 +1,45 @@
# Build stage
FROM node:20-alpine AS builder
WORKDIR /app
# 构建参数NEXT_PUBLIC_* 变量需要在构建时注入)
ARG NEXT_PUBLIC_API_URL
ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL}
# Copy package files
COPY package*.json ./
# Install dependencies
RUN npm ci
# Copy source code
COPY . .
# Build Next.js app
RUN npm run build
# Production stage
FROM node:20-alpine AS runner
WORKDIR /app
ENV NODE_ENV=production
# Create non-root user
RUN addgroup --system --gid 1001 nodejs
RUN adduser --system --uid 1001 nextjs
# Copy necessary files from builder
COPY --from=builder /app/public ./public
COPY --from=builder /app/.next/standalone ./
COPY --from=builder /app/.next/static ./.next/static
USER nextjs
EXPOSE 3000
ENV PORT=3000
ENV HOSTNAME="0.0.0.0"
CMD ["node", "server.js"]

22
frontend/components.json Normal file
View File

@@ -0,0 +1,22 @@
{
"$schema": "https://ui.shadcn.com/schema.json",
"style": "new-york",
"rsc": true,
"tsx": true,
"tailwind": {
"config": "tailwind.config.ts",
"css": "src/app/globals.css",
"baseColor": "neutral",
"cssVariables": true,
"prefix": ""
},
"iconLibrary": "lucide",
"aliases": {
"components": "@/components",
"utils": "@/lib/utils",
"ui": "@/components/ui",
"lib": "@/lib",
"hooks": "@/hooks"
},
"registries": {}
}

5
frontend/next-env.d.ts vendored Normal file
View File

@@ -0,0 +1,5 @@
/// <reference types="next" />
/// <reference types="next/image-types/global" />
// NOTE: This file should not be edited
// see https://nextjs.org/docs/app/building-your-application/configuring/typescript for more information.

7
frontend/next.config.js Normal file
View File

@@ -0,0 +1,7 @@
/** @type {import('next').NextConfig} */
const nextConfig = {
reactStrictMode: true,
output: 'standalone'
};
module.exports = nextConfig;

2917
frontend/package-lock.json generated Normal file

File diff suppressed because it is too large Load Diff

45
frontend/package.json Normal file
View File

@@ -0,0 +1,45 @@
{
"name": "notify-frontend",
"version": "0.1.0",
"private": true,
"scripts": {
"dev": "next dev",
"build": "next build",
"start": "next start"
},
"dependencies": {
"@hookform/resolvers": "^5.2.2",
"@radix-ui/react-alert-dialog": "^1.1.15",
"@radix-ui/react-checkbox": "^1.3.3",
"@radix-ui/react-dialog": "^1.1.15",
"@radix-ui/react-label": "^2.1.8",
"@radix-ui/react-popover": "^1.1.15",
"@radix-ui/react-scroll-area": "^1.2.10",
"@radix-ui/react-select": "^2.2.6",
"@radix-ui/react-separator": "^1.1.8",
"@radix-ui/react-slot": "^1.2.4",
"@tailwindcss/postcss": "^4.1.18",
"autoprefixer": "^10.4.23",
"class-variance-authority": "^0.7.1",
"clsx": "^2.1.1",
"date-fns": "^4.1.0",
"lucide-react": "^0.562.0",
"next": "^14.2.5",
"next-themes": "^0.4.6",
"postcss": "^8.5.6",
"react": "^18.3.1",
"react-day-picker": "^9.13.0",
"react-dom": "^18.3.1",
"react-hook-form": "^7.71.1",
"sonner": "^2.0.7",
"tailwind-merge": "^3.4.0",
"tailwindcss": "^4.1.18",
"tailwindcss-animate": "^1.0.7",
"zod": "^4.3.6"
},
"devDependencies": {
"@types/node": "^20.14.2",
"@types/react": "^18.3.3",
"typescript": "^5.5.2"
}
}

View File

@@ -0,0 +1,6 @@
module.exports = {
plugins: {
"@tailwindcss/postcss": {},
autoprefixer: {},
},
};

View File

@@ -0,0 +1,182 @@
@import "tailwindcss";
@plugin "tailwindcss-animate";
@custom-variant dark (&:is(.dark *));
@layer base {
:root {
--background: 220 20% 97%;
--foreground: 220 20% 10%;
--card: 0 0% 100%;
--card-foreground: 220 20% 10%;
--popover: 0 0% 100%;
--popover-foreground: 220 20% 10%;
--primary: 220 80% 55%;
--primary-foreground: 0 0% 100%;
--secondary: 220 15% 94%;
--secondary-foreground: 220 20% 20%;
--muted: 220 15% 94%;
--muted-foreground: 220 10% 50%;
--accent: 220 15% 94%;
--accent-foreground: 220 20% 20%;
--destructive: 0 72% 55%;
--destructive-foreground: 0 0% 100%;
--border: 220 15% 92%;
--input: 220 15% 90%;
--ring: 220 80% 55%;
--radius: 0.5rem;
--sidebar: 0 0% 100%;
--sidebar-foreground: 220 20% 20%;
--sidebar-primary: 220 80% 55%;
--sidebar-primary-foreground: 0 0% 100%;
--sidebar-accent: 220 15% 96%;
--sidebar-accent-foreground: 220 20% 20%;
--sidebar-border: 220 15% 94%;
--sidebar-ring: 220 80% 55%;
}
html {
font-size: 28px;
}
* {
border-color: hsl(var(--border) / 0.6);
}
body {
background-color: hsl(var(--background));
color: hsl(var(--foreground));
}
}
@theme inline {
--radius-sm: calc(var(--radius) - 4px);
--radius-md: calc(var(--radius) - 2px);
--radius-lg: var(--radius);
--radius-xl: calc(var(--radius) + 4px);
--radius-2xl: calc(var(--radius) + 8px);
--radius-3xl: calc(var(--radius) + 12px);
--radius-4xl: calc(var(--radius) + 16px);
--color-background: var(--background);
--color-foreground: var(--foreground);
--color-card: var(--card);
--color-card-foreground: var(--card-foreground);
--color-popover: var(--popover);
--color-popover-foreground: var(--popover-foreground);
--color-primary: var(--primary);
--color-primary-foreground: var(--primary-foreground);
--color-secondary: var(--secondary);
--color-secondary-foreground: var(--secondary-foreground);
--color-muted: var(--muted);
--color-muted-foreground: var(--muted-foreground);
--color-accent: var(--accent);
--color-accent-foreground: var(--accent-foreground);
--color-destructive: var(--destructive);
--color-border: var(--border);
--color-input: var(--input);
--color-ring: var(--ring);
--color-chart-1: var(--chart-1);
--color-chart-2: var(--chart-2);
--color-chart-3: var(--chart-3);
--color-chart-4: var(--chart-4);
--color-chart-5: var(--chart-5);
--color-sidebar: var(--sidebar);
--color-sidebar-foreground: var(--sidebar-foreground);
--color-sidebar-primary: var(--sidebar-primary);
--color-sidebar-primary-foreground: var(--sidebar-primary-foreground);
--color-sidebar-accent: var(--sidebar-accent);
--color-sidebar-accent-foreground: var(--sidebar-accent-foreground);
--color-sidebar-border: var(--sidebar-border);
--color-sidebar-ring: var(--sidebar-ring);
}
:root {
--radius: 0.625rem;
--background: oklch(1 0 0);
--foreground: oklch(0.145 0 0);
--card: oklch(1 0 0);
--card-foreground: oklch(0.145 0 0);
--popover: oklch(1 0 0);
--popover-foreground: oklch(0.145 0 0);
--primary: oklch(0.205 0 0);
--primary-foreground: oklch(0.985 0 0);
--secondary: oklch(0.97 0 0);
--secondary-foreground: oklch(0.205 0 0);
--muted: oklch(0.97 0 0);
--muted-foreground: oklch(0.556 0 0);
--accent: oklch(0.97 0 0);
--accent-foreground: oklch(0.205 0 0);
--destructive: oklch(0.577 0.245 27.325);
--border: oklch(0.922 0 0);
--input: oklch(0.922 0 0);
--ring: oklch(0.708 0 0);
--chart-1: oklch(0.646 0.222 41.116);
--chart-2: oklch(0.6 0.118 184.704);
--chart-3: oklch(0.398 0.07 227.392);
--chart-4: oklch(0.828 0.189 84.429);
--chart-5: oklch(0.769 0.188 70.08);
--sidebar: oklch(0.985 0 0);
--sidebar-foreground: oklch(0.145 0 0);
--sidebar-primary: oklch(0.205 0 0);
--sidebar-primary-foreground: oklch(0.985 0 0);
--sidebar-accent: oklch(0.97 0 0);
--sidebar-accent-foreground: oklch(0.205 0 0);
--sidebar-border: oklch(0.922 0 0);
--sidebar-ring: oklch(0.708 0 0);
}
.dark {
--background: oklch(0.145 0 0);
--foreground: oklch(0.985 0 0);
--card: oklch(0.205 0 0);
--card-foreground: oklch(0.985 0 0);
--popover: oklch(0.205 0 0);
--popover-foreground: oklch(0.985 0 0);
--primary: oklch(0.922 0 0);
--primary-foreground: oklch(0.205 0 0);
--secondary: oklch(0.269 0 0);
--secondary-foreground: oklch(0.985 0 0);
--muted: oklch(0.269 0 0);
--muted-foreground: oklch(0.708 0 0);
--accent: oklch(0.269 0 0);
--accent-foreground: oklch(0.985 0 0);
--destructive: oklch(0.704 0.191 22.216);
--border: oklch(1 0 0 / 10%);
--input: oklch(1 0 0 / 15%);
--ring: oklch(0.556 0 0);
--chart-1: oklch(0.488 0.243 264.376);
--chart-2: oklch(0.696 0.17 162.48);
--chart-3: oklch(0.769 0.188 70.08);
--chart-4: oklch(0.627 0.265 303.9);
--chart-5: oklch(0.645 0.246 16.439);
--sidebar: oklch(0.205 0 0);
--sidebar-foreground: oklch(0.985 0 0);
--sidebar-primary: oklch(0.488 0.243 264.376);
--sidebar-primary-foreground: oklch(0.985 0 0);
--sidebar-accent: oklch(0.269 0 0);
--sidebar-accent-foreground: oklch(0.985 0 0);
--sidebar-border: oklch(1 0 0 / 10%);
--sidebar-ring: oklch(0.556 0 0);
}
@layer base {
* {
@apply border-border outline-ring/50;
}
body {
@apply bg-background text-foreground;
}
}
@layer utilities {
.scrollbar-hide {
-ms-overflow-style: none;
scrollbar-width: none;
}
.scrollbar-hide::-webkit-scrollbar {
display: none;
}
}

View File

@@ -0,0 +1,301 @@
"use client";
import { useEffect, useState } from "react";
import AppShell from "@/components/AppShell";
import Avatar from "@/components/ui/avatar";
import { Check, Copy, Eye, Users } from "lucide-react";
import {
AlertDialog,
AlertDialogAction,
AlertDialogCancel,
AlertDialogContent,
AlertDialogDescription,
AlertDialogFooter,
AlertDialogHeader,
AlertDialogTitle,
AlertDialogTrigger,
} from "@/components/ui/alert-dialog";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import {
Dialog,
DialogContent,
DialogDescription,
DialogHeader,
DialogTitle,
} from "@/components/ui/dialog";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { api } from "@/lib/api";
import { useTranslation } from "@/lib/i18n";
// A user who registered through an invite code, as returned by the
// invite-detail endpoint (camelCase fields).
type RegisteredUser = {
  id: string;
  username: string;
  // Optional avatar image reference; may be null or absent.
  avatar?: string | null;
  // ISO timestamp of when the user registered.
  createdAt: string;
};
// Invite summary as returned by the list endpoint.
// NOTE(review): this shape uses snake_case while InviteWithUsers (detail
// endpoint) uses camelCase — presumably two different serializers; confirm
// against the backend API.
type Invite = {
  id: string;
  code: string;
  creator_id: string;
  // Maximum number of registrations this code allows.
  max_uses: number;
  // Number of registrations consumed so far.
  used_count: number;
  // ISO expiry timestamp.
  expires_at: string;
  // Set when the invite has been revoked; null while it is live.
  revoked_at: string | null;
  created_at: string;
};
// Invite detail, including the users who registered with it (camelCase).
type InviteWithUsers = {
  id: string;
  code: string;
  creatorId: string;
  maxUses: number;
  usedCount: number;
  expiresAt: string;
  revokedAt: string | null;
  createdAt: string;
  registeredUsers: RegisteredUser[];
};
const InvitesPage = () => {
const t = useTranslation();
const [invites, setInvites] = useState<Invite[]>([]);
const [maxUses, setMaxUses] = useState(5);
const [expiresInDays, setExpiresInDays] = useState(7);
const [selectedInvite, setSelectedInvite] = useState<InviteWithUsers | null>(null);
const [detailsOpen, setDetailsOpen] = useState(false);
const [copiedId, setCopiedId] = useState<string | null>(null);
const load = async () => {
const data = await api.getInvites();
setInvites(data as Invite[]);
};
useEffect(() => {
load().catch(() => null);
}, []);
const createInvite = async (event: React.FormEvent) => {
event.preventDefault();
await api.createInvite({ maxUses, expiresInDays });
setMaxUses(5);
setExpiresInDays(7);
await load();
};
const revokeInvite = async (id: string) => {
await api.revokeInvite(id);
await load();
};
const viewDetails = async (id: string) => {
const data = await api.getInvite(id);
setSelectedInvite(data as InviteWithUsers);
setDetailsOpen(true);
};
const copyCode = async (code: string, id: string) => {
await navigator.clipboard.writeText(code);
setCopiedId(id);
setTimeout(() => setCopiedId(null), 2000);
};
const formatDateTime = (dateStr: string): string => {
const d = new Date(dateStr);
const pad = (n: number) => n.toString().padStart(2, "0");
return `${d.getFullYear()}-${pad(d.getMonth() + 1)}-${pad(d.getDate())} ${pad(d.getHours())}:${pad(d.getMinutes())}`;
};
const getInviteStatus = (invite: Invite): { key: string; color: string } => {
if (invite.revoked_at) {
return { key: "statusRevoked", color: "bg-red-100 text-red-700" };
}
if (new Date(invite.expires_at) < new Date()) {
return { key: "statusExpired", color: "bg-slate-100 text-slate-700" };
}
if (invite.used_count >= invite.max_uses) {
return { key: "statusExhausted", color: "bg-amber-100 text-amber-700" };
}
return { key: "statusActive", color: "bg-green-100 text-green-700" };
};
return (
<AppShell>
<div className="grid gap-6 lg:grid-cols-2">
<Card className="bg-white">
<CardHeader>
<CardTitle>{t("createInvite")}</CardTitle>
<CardDescription>{t("createInviteDesc")}</CardDescription>
</CardHeader>
<CardContent>
<form className="space-y-4" onSubmit={createInvite}>
<div className="space-y-2">
<Label htmlFor="maxUses">{t("maxUses")}</Label>
<Input
id="maxUses"
type="number"
min={1}
max={20}
value={maxUses}
onChange={(event) => setMaxUses(Number(event.target.value))}
/>
</div>
<div className="space-y-2">
<Label htmlFor="expiresInDays">{t("expiresInDays")}</Label>
<Input
id="expiresInDays"
type="number"
min={1}
max={30}
value={expiresInDays}
onChange={(event) => setExpiresInDays(Number(event.target.value))}
/>
</div>
<Button type="submit">{t("generateInvite")}</Button>
</form>
</CardContent>
</Card>
<Card className="bg-white">
<CardHeader>
<CardTitle>{t("myInvites")}</CardTitle>
<CardDescription>{t("myInvitesDesc")}</CardDescription>
</CardHeader>
<CardContent>
<div className="grid gap-3">
{invites.map((invite) => {
const status = getInviteStatus(invite);
const isActive = status.key === "statusActive";
return (
<div
key={invite.id}
className="flex items-center justify-between rounded-lg bg-slate-50/80 px-4 py-3 transition-colors hover:bg-slate-100/80"
>
<div className="min-w-0 flex-1">
<div className="flex items-center gap-2">
<code className="rounded bg-slate-200 px-2 py-0.5 text-sm font-semibold text-slate-800">
{invite.code}
</code>
<button
type="button"
onClick={() => copyCode(invite.code, invite.id)}
className="rounded p-1 text-slate-400 hover:bg-slate-200 hover:text-slate-600"
title={t("copyCode")}
>
{copiedId === invite.id ? (
<Check className="h-4 w-4 text-green-500" />
) : (
<Copy className="h-4 w-4" />
)}
</button>
<span className={`rounded-full px-2 py-0.5 text-xs font-medium ${status.color}`}>
{t(status.key as keyof ReturnType<typeof useTranslation>)}
</span>
</div>
<div className="mt-1 flex items-center gap-3 text-xs text-slate-500">
<span className="flex items-center gap-1">
<Users className="h-3 w-3" />
{invite.used_count}/{invite.max_uses}
</span>
<span>{t("expiresAt")}: {formatDateTime(invite.expires_at)}</span>
</div>
</div>
<div className="flex items-center gap-1">
<Button
variant="ghost"
size="sm"
onClick={() => viewDetails(invite.id)}
className="text-slate-400 hover:text-blue-500"
>
<Eye className="h-4 w-4" />
</Button>
{isActive && (
<AlertDialog>
<AlertDialogTrigger asChild>
<Button
variant="ghost"
size="sm"
className="text-slate-400 hover:text-red-500"
>
{t("revoke")}
</Button>
</AlertDialogTrigger>
<AlertDialogContent>
<AlertDialogHeader>
<AlertDialogTitle>{t("revokeInvite")}</AlertDialogTitle>
<AlertDialogDescription>
{t("revokeInviteDesc", { code: invite.code })}
</AlertDialogDescription>
</AlertDialogHeader>
<AlertDialogFooter>
<AlertDialogCancel>{t("cancel")}</AlertDialogCancel>
<AlertDialogAction
className="bg-red-500 hover:bg-red-600"
onClick={() => revokeInvite(invite.id)}
>
{t("revoke")}
</AlertDialogAction>
</AlertDialogFooter>
</AlertDialogContent>
</AlertDialog>
)}
</div>
</div>
);
})}
{invites.length === 0 && (
<div className="rounded-lg bg-slate-50/80 p-6 text-center text-sm text-slate-400">
{t("noInvites")}
</div>
)}
</div>
</CardContent>
</Card>
</div>
{/* Details Dialog */}
<Dialog open={detailsOpen} onOpenChange={setDetailsOpen}>
<DialogContent className="max-w-md">
<DialogHeader>
<DialogTitle className="flex items-center gap-2">
<code className="rounded bg-slate-200 px-2 py-0.5 text-base">
{selectedInvite?.code}
</code>
</DialogTitle>
<DialogDescription>
{t("registeredUsers")}
</DialogDescription>
</DialogHeader>
<div className="space-y-3">
{selectedInvite?.registeredUsers && selectedInvite.registeredUsers.length > 0 ? (
selectedInvite.registeredUsers.map((user) => (
<div
key={user.id}
className="flex items-center gap-3 rounded-lg bg-slate-50 p-3"
>
<Avatar username={user.username} src={user.avatar} size="sm" />
<div className="min-w-0 flex-1">
<div className="font-medium text-slate-800">{user.username}</div>
<div className="text-xs text-slate-500">
{formatDateTime(user.createdAt)}
</div>
</div>
</div>
))
) : (
<div className="rounded-lg bg-slate-50 p-6 text-center text-sm text-slate-400">
{t("noRegisteredUsers")}
</div>
)}
</div>
</DialogContent>
</Dialog>
</AppShell>
);
};
export default InvitesPage;

View File

@@ -0,0 +1,20 @@
import "./globals.css";
import { I18nProvider } from "@/lib/i18n";
// Next.js route metadata: sets the document <title> and meta description
// for every page under this root layout.
export const metadata = {
  title: "Notify",
  description: "简洁提醒应用",
};
// Root layout: wraps every page with the global stylesheet classes and the
// i18n provider. suppressHydrationWarning avoids noise from html-level
// attributes that may differ between server and client.
export default function RootLayout({
  children,
}: {
  children: React.ReactNode;
}) {
  return (
    <html lang="zh" suppressHydrationWarning>
      <body className="min-h-screen bg-background font-sans antialiased">
        <I18nProvider>{children}</I18nProvider>
      </body>
    </html>
  );
}

View File

@@ -0,0 +1,81 @@
"use client";
import { useState } from "react";
import Link from "next/link";
import LanguageSwitcher from "@/components/LanguageSwitcher";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Input } from "@/components/ui/input";
import { Label } from "@/components/ui/label";
import { api } from "@/lib/api";
import { setToken } from "@/lib/auth";
import { useTranslation } from "@/lib/i18n";
// Login page: collects username/password, exchanges them for a token via
// the API, stores the token, and navigates to the todos page.
const LoginPage = () => {
  const t = useTranslation();
  const [username, setUsername] = useState("");
  const [password, setPassword] = useState("");
  const [error, setError] = useState("");
  // Guards against double submission while the login request is in flight.
  const [submitting, setSubmitting] = useState(false);
  // Authenticate, persist the token, then hard-navigate to /todos.
  const onSubmit = async (event: React.FormEvent) => {
    event.preventDefault();
    if (submitting) return;
    setError("");
    setSubmitting(true);
    try {
      const result = await api.login({ username, password });
      setToken(result.token);
      // Full page navigation (not client routing); presumably so the app
      // reboots with the stored token — TODO confirm this is intentional.
      window.location.href = "/todos";
    } catch {
      // The API error shape is not inspected; show a generic failure message.
      setError(t("loginFailed"));
      setSubmitting(false);
    }
  };
  return (
    <div className="relative flex min-h-screen items-center justify-center bg-gradient-to-br from-slate-50 via-white to-slate-100 px-4 py-10">
      <div className="absolute right-4 top-4">
        <LanguageSwitcher />
      </div>
      <Card className="w-full max-w-md border-slate-200/80 shadow-lg">
        <CardHeader>
          <CardTitle className="text-2xl">{t("login")}</CardTitle>
          <CardDescription>{t("loginWelcome")}</CardDescription>
        </CardHeader>
        <CardContent>
          <form className="space-y-4" onSubmit={onSubmit}>
            <div className="space-y-2">
              <Label htmlFor="username">{t("username")}</Label>
              <Input
                id="username"
                placeholder={t("enterUsername")}
                value={username}
                onChange={(event) => setUsername(event.target.value)}
              />
            </div>
            <div className="space-y-2">
              <Label htmlFor="password">{t("password")}</Label>
              <Input
                id="password"
                type="password"
                placeholder={t("enterPassword")}
                value={password}
                onChange={(event) => setPassword(event.target.value)}
              />
            </div>
            {error && <div className="text-sm text-destructive">{error}</div>}
            <Button className="w-full" type="submit" disabled={submitting}>
              {t("login")}
            </Button>
            <Link
              className="block text-center text-sm text-slate-500 transition hover:text-slate-900"
              href="/register"
            >
              {t("noAccount")}
            </Link>
          </form>
        </CardContent>
      </Card>
    </div>
  );
};
export default LoginPage;

View File

@@ -0,0 +1,94 @@
"use client";
import { useEffect, useState } from "react";
import AppShell from "@/components/AppShell";
import { Button } from "@/components/ui/button";
import { Card, CardContent, CardDescription, CardHeader, CardTitle } from "@/components/ui/card";
import { Input } from "@/components/ui/input";
import { api } from "@/lib/api";
import { useTranslation } from "@/lib/i18n";
import { useNotification } from "@/lib/notification-context";
// A scheduled reminder notification belonging to the current user.
// NOTE(review): this local alias shadows the global DOM `Notification`
// type within this module — consider renaming (e.g. NotificationItem).
type Notification = {
  id: string;
  // ISO timestamp at which the notification fires.
  triggerAt: string;
  status: string;
  // Delivery channel label shown in the UI.
  channel: string;
  // Set once the user has marked the notification as read.
  readAt?: string | null;
};
// Notification inbox: lists the user's notifications, supports client-side
// search (the search box was previously a dead control with no handler),
// and lets the user mark one or all notifications as read.
const NotificationsPage = () => {
  const t = useTranslation();
  const [notifications, setNotifications] = useState<Notification[]>([]);
  // Free-text query typed into the search box; filters the visible list.
  const [search, setSearch] = useState("");
  const { refreshUnreadCount } = useNotification();
  // Fetch the full notification list from the API.
  const load = async () => {
    const data = (await api.getNotifications()) as Notification[];
    setNotifications(data);
  };
  useEffect(() => {
    // Errors are intentionally swallowed; the list simply stays empty.
    load().catch(() => null);
  }, []);
  // Mark one notification read, then refresh the list and the unread badge.
  const markRead = async (id: string) => {
    await api.markNotificationRead(id);
    await load();
    await refreshUnreadCount();
  };
  // Mark every notification read, then refresh the list and the unread badge.
  const markAllRead = async () => {
    await api.markAllNotificationsRead();
    await load();
    await refreshUnreadCount();
  };
  // Case-insensitive filter over the channel name and the localized trigger
  // time; an empty query shows everything.
  const query = search.trim().toLowerCase();
  const visible = query
    ? notifications.filter(
        (item) =>
          item.channel.toLowerCase().includes(query) ||
          new Date(item.triggerAt).toLocaleString().toLowerCase().includes(query),
      )
    : notifications;
  return (
    <AppShell>
      <Card className="bg-white">
        <CardHeader className="flex-row items-center justify-between space-y-0">
          <div>
            <CardTitle>{t("notifications")}</CardTitle>
            <CardDescription>{t("notificationsDesc")}</CardDescription>
          </div>
          <div className="flex items-center gap-2">
            <Input
              className="max-w-xs"
              placeholder={t("searchNotifications")}
              value={search}
              onChange={(event) => setSearch(event.target.value)}
            />
            {notifications.length > 0 && (
              <Button variant="outline" size="sm" onClick={markAllRead}>
                {t("markAllRead")}
              </Button>
            )}
          </div>
        </CardHeader>
        <CardContent>
          <div className="grid gap-3">
            {visible.map((item) => (
              <div
                key={item.id}
                className="flex items-center justify-between rounded-lg bg-slate-50/80 px-4 py-3 transition-colors hover:bg-slate-100/80"
              >
                <div>
                  <div className="text-sm font-semibold text-slate-800">
                    {t("triggerTime")}{new Date(item.triggerAt).toLocaleString()}
                  </div>
                  <div className="text-xs text-slate-500">{t("channel")}{item.channel}</div>
                </div>
                <Button variant="outline" size="sm" onClick={() => markRead(item.id)}>
                  {t("markRead")}
                </Button>
              </div>
            ))}
            {visible.length === 0 && (
              <div className="rounded-lg bg-slate-50/80 p-6 text-center text-sm text-slate-400">
                {t("noNotification")}
              </div>
            )}
          </div>
        </CardContent>
      </Card>
    </AppShell>
  );
};
export default NotificationsPage;

Some files were not shown because too many files have changed in this diff Show More