feat: initial commit - veille-reglementaire v1.0.0

This commit is contained in:
Manus Deploy
2026-04-13 12:05:29 -04:00
commit 347725def5
139 changed files with 27484 additions and 0 deletions

28
server/_core/context.ts Normal file
View File

@@ -0,0 +1,28 @@
import type { CreateExpressContextOptions } from "@trpc/server/adapters/express";
import type { User } from "../../drizzle/schema";
import { sdk } from "./sdk";
// Per-request context made available to every tRPC procedure.
export type TrpcContext = {
req: CreateExpressContextOptions["req"];
res: CreateExpressContextOptions["res"];
// Authenticated user, or null when the request is anonymous/public.
user: User | null;
};
/**
 * Builds the tRPC context for an incoming Express request.
 * Authentication is best-effort: any failure simply yields an anonymous
 * context (user = null) so public procedures keep working.
 */
export async function createContext(
  opts: CreateExpressContextOptions
): Promise<TrpcContext> {
  const user: User | null = await sdk
    .authenticateRequest(opts.req)
    .catch(() => null);
  return {
    req: opts.req,
    res: opts.res,
    user,
  };
}

48
server/_core/cookies.ts Normal file
View File

@@ -0,0 +1,48 @@
import type { CookieOptions, Request } from "express";
const LOCAL_HOSTS = new Set(["localhost", "127.0.0.1", "::1"]);
// True when the host string looks like a raw IP address: an IPv4 dotted quad,
// or anything containing ":" (taken as IPv6 presence).
function isIpAddress(host: string) {
  const dottedQuad = /^\d{1,3}(\.\d{1,3}){3}$/;
  return dottedQuad.test(host) || host.includes(":");
}
// A request counts as secure when Express saw HTTPS directly, or when any
// entry of the proxy-set X-Forwarded-Proto header reports https.
function isSecureRequest(req: Request) {
  if (req.protocol === "https") return true;
  const forwarded = req.headers["x-forwarded-proto"];
  if (!forwarded) return false;
  const protocols = Array.isArray(forwarded)
    ? forwarded
    : forwarded.split(",");
  for (const proto of protocols) {
    if (proto.trim().toLowerCase() === "https") return true;
  }
  return false;
}
/**
 * Cookie attributes for the session cookie.
 * The Domain attribute is intentionally left unset (host-only cookie);
 * earlier domain-derivation logic was removed as dead commented-out code.
 * SameSite=None requires Secure, which mirrors whether the request came
 * in over HTTPS (directly or via a trusted proxy header).
 */
export function getSessionCookieOptions(
  req: Request
): Pick<CookieOptions, "domain" | "httpOnly" | "path" | "sameSite" | "secure"> {
  const secure = isSecureRequest(req);
  return {
    httpOnly: true,
    path: "/",
    sameSite: "none",
    secure,
  };
}

64
server/_core/dataApi.ts Normal file
View File

@@ -0,0 +1,64 @@
/**
* Quick example (matches curl usage):
* await callDataApi("Youtube/search", {
* query: { gl: "US", hl: "en", q: "manus" },
* })
*/
import { ENV } from "./env";
export type DataApiCallOptions = {
query?: Record<string, unknown>;
body?: Record<string, unknown>;
pathParams?: Record<string, unknown>;
formData?: Record<string, unknown>;
};
/**
 * Invokes a Manus Data API endpoint through the Forge proxy.
 *
 * @param apiId - Upstream API identifier, e.g. "Youtube/search".
 * @param options - Optional query/body/path/form parameters.
 * @returns Parsed JSON payload; when the proxy wraps the result in a
 *          `jsonData` string, that string is decoded and returned instead.
 * @throws Error when credentials are missing or the HTTP call fails.
 */
export async function callDataApi(
  apiId: string,
  options: DataApiCallOptions = {}
): Promise<unknown> {
  const { forgeApiUrl, forgeApiKey } = ENV;
  if (!forgeApiUrl) {
    throw new Error("BUILT_IN_FORGE_API_URL is not configured");
  }
  if (!forgeApiKey) {
    throw new Error("BUILT_IN_FORGE_API_KEY is not configured");
  }
  // URL() resolves relative paths against the base, which must end in "/".
  const base = forgeApiUrl.endsWith("/") ? forgeApiUrl : `${forgeApiUrl}/`;
  const endpoint = new URL("webdevtoken.v1.WebDevService/CallApi", base).toString();
  const requestBody = JSON.stringify({
    apiId,
    query: options.query,
    body: options.body,
    path_params: options.pathParams,
    multipart_form_data: options.formData,
  });
  const response = await fetch(endpoint, {
    method: "POST",
    headers: {
      accept: "application/json",
      "content-type": "application/json",
      "connect-protocol-version": "1",
      authorization: `Bearer ${forgeApiKey}`,
    },
    body: requestBody,
  });
  if (!response.ok) {
    const detail = await response.text().catch(() => "");
    throw new Error(
      `Data API request failed (${response.status} ${response.statusText})${detail ? `: ${detail}` : ""}`
    );
  }
  const payload = await response.json().catch(() => ({}));
  // The proxy may wrap results as a JSON string under `jsonData`.
  if (payload && typeof payload === "object" && "jsonData" in payload) {
    try {
      return JSON.parse((payload as Record<string, string>).jsonData ?? "{}");
    } catch {
      return (payload as Record<string, unknown>).jsonData;
    }
  }
  return payload;
}

10
server/_core/env.ts Normal file
View File

@@ -0,0 +1,10 @@
// Reads a string environment variable, defaulting to the empty string.
const readEnv = (key: string): string => process.env[key] ?? "";
/** Centralized environment configuration, resolved once at module load. */
export const ENV = {
  appId: readEnv("VITE_APP_ID"),
  cookieSecret: readEnv("JWT_SECRET"),
  databaseUrl: readEnv("DATABASE_URL"),
  oAuthServerUrl: readEnv("OAUTH_SERVER_URL"),
  ownerOpenId: readEnv("OWNER_OPEN_ID"),
  isProduction: process.env.NODE_ENV === "production",
  forgeApiUrl: readEnv("BUILT_IN_FORGE_API_URL"),
  forgeApiKey: readEnv("BUILT_IN_FORGE_API_KEY"),
};

View File

@@ -0,0 +1,92 @@
/**
* Image generation helper using internal ImageService
*
* Example usage:
* const { url: imageUrl } = await generateImage({
* prompt: "A serene landscape with mountains"
* });
*
* For editing:
* const { url: imageUrl } = await generateImage({
* prompt: "Add a rainbow to this landscape",
* originalImages: [{
* url: "https://example.com/original.jpg",
* mimeType: "image/jpeg"
* }]
* });
*/
import { storagePut } from "server/storage";
import { ENV } from "./env";
export type GenerateImageOptions = {
prompt: string;
originalImages?: Array<{
url?: string;
b64Json?: string;
mimeType?: string;
}>;
};
export type GenerateImageResponse = {
url?: string;
};
/**
 * Generates an image (or edits the provided originals) through the Forge
 * ImageService, stores the decoded bytes via storagePut, and returns the
 * storage URL.
 *
 * @param options - Prompt plus optional original images for edit mode.
 * @returns Object with the storage `url` of the saved image.
 * @throws Error when Forge credentials are missing or the upstream call fails.
 */
export async function generateImage(
options: GenerateImageOptions
): Promise<GenerateImageResponse> {
if (!ENV.forgeApiUrl) {
throw new Error("BUILT_IN_FORGE_API_URL is not configured");
}
if (!ENV.forgeApiKey) {
throw new Error("BUILT_IN_FORGE_API_KEY is not configured");
}
// Build the full URL by appending the service path to the base URL
const baseUrl = ENV.forgeApiUrl.endsWith("/")
? ENV.forgeApiUrl
: `${ENV.forgeApiUrl}/`;
const fullUrl = new URL(
"images.v1.ImageService/GenerateImage",
baseUrl
).toString();
const response = await fetch(fullUrl, {
method: "POST",
headers: {
accept: "application/json",
"content-type": "application/json",
// Connect-RPC protocol marker expected by the Forge gateway.
"connect-protocol-version": "1",
authorization: `Bearer ${ENV.forgeApiKey}`,
},
// snake_case field names match the upstream wire contract.
body: JSON.stringify({
prompt: options.prompt,
original_images: options.originalImages || [],
}),
});
if (!response.ok) {
const detail = await response.text().catch(() => "");
throw new Error(
`Image generation request failed (${response.status} ${response.statusText})${detail ? `: ${detail}` : ""}`
);
}
// NOTE(review): assumes the service always returns a base64 `image` object —
// a missing field here would surface as a TypeError. Confirm upstream contract.
const result = (await response.json()) as {
image: {
b64Json: string;
mimeType: string;
};
};
const base64Data = result.image.b64Json;
const buffer = Buffer.from(base64Data, "base64");
// Save to S3
// NOTE(review): key uses a .png extension even if mimeType differs — confirm intended.
const { url } = await storagePut(
`generated/${Date.now()}.png`,
buffer,
result.image.mimeType
);
return {
url,
};
}

100
server/_core/index.ts Normal file
View File

@@ -0,0 +1,100 @@
import "dotenv/config";
import express from "express";
import { createServer } from "http";
import net from "net";
import { createExpressMiddleware } from "@trpc/server/adapters/express";
import * as cron from "node-cron";
import { registerOAuthRoutes } from "./oauth";
import { appRouter } from "../routers";
import { createContext } from "./context";
import { serveStatic, setupVite } from "./vite";
import { runFullImport } from "../importer";
import uploadRoutes from "../uploadRoutes";
import { ensureAdminExists } from "../localAuth";
import { getSetting } from "../db";
// Probes whether a TCP port can currently be bound on this machine by
// opening and immediately closing a throwaway server.
function isPortAvailable(port: number): Promise<boolean> {
  return new Promise(resolve => {
    const probe = net.createServer();
    probe.once("error", () => resolve(false));
    probe.listen(port, () => {
      probe.close(() => resolve(true));
    });
  });
}
// Returns the first free port in [startPort, startPort + 20); throws when
// the whole window is occupied.
async function findAvailablePort(startPort: number = 3000): Promise<number> {
  const limit = startPort + 20;
  for (let candidate = startPort; candidate < limit; candidate++) {
    const free = await isPortAvailable(candidate);
    if (free) {
      return candidate;
    }
  }
  throw new Error(`No available port found starting from ${startPort}`);
}
// ─── Daily import task ────────────────────────────────────────────────────────
let cronJob: ReturnType<typeof cron.schedule> | null = null;
/**
 * (Re)schedules the daily data import.
 * Reads the configurable "import_time" setting ("HH:MM", default 06:00),
 * cancels any previously scheduled job, and registers a new cron task.
 *
 * Fix: `Number("xx")` yields NaN, and `NaN ?? 0` is still NaN, so a
 * malformed setting previously produced an invalid cron expression like
 * "0 NaN NaN * * *". Invalid parts now fall back to 06:00 explicitly.
 */
async function scheduleDailyImport() {
  const importTime = (await getSetting("import_time")) || "06:00";
  const [rawHour, rawMinute] = importTime.split(":").map(Number);
  const hour =
    Number.isInteger(rawHour) && rawHour >= 0 && rawHour <= 23 ? rawHour : 6;
  const minute =
    Number.isInteger(rawMinute) && rawMinute >= 0 && rawMinute <= 59
      ? rawMinute
      : 0;
  // Six-field expression (with seconds): fire at HH:MM:00 every day.
  const cronExpr = `0 ${minute} ${hour} * * *`;
  if (cronJob) {
    cronJob.stop();
    cronJob = null;
  }
  cronJob = cron.schedule(cronExpr, async () => {
    console.log(`[Cron] Import automatique démarré à ${new Date().toISOString()}`);
    try {
      const result = await runFullImport();
      console.log(`[Cron] Import terminé — Veille: +${result.veille.newRows} | AAP: +${result.aap.newRows}`);
    } catch (e) {
      console.error("[Cron] Erreur lors de l'import:", e);
    }
  });
  console.log(`[Cron] Import quotidien planifié à ${importTime} (${cronExpr})`);
}
/**
 * Boots the Express/tRPC server: mounts OAuth, upload and tRPC routes,
 * wires Vite middleware (dev) or prebuilt static assets (prod), binds the
 * first free port at or above PORT, then runs post-start initialization
 * (admin bootstrap + daily import scheduling).
 */
async function startServer() {
const app = express();
const server = createServer(app);
// Large body limits to accommodate sizeable JSON payloads from the importer/UI.
app.use(express.json({ limit: "50mb" }));
app.use(express.urlencoded({ limit: "50mb", extended: true }));
registerOAuthRoutes(app);
app.use(uploadRoutes);
app.use(
"/api/trpc",
createExpressMiddleware({ router: appRouter, createContext })
);
// Dev serves through Vite; prod serves the prebuilt client bundle.
if (process.env.NODE_ENV === "development") {
await setupVite(app, server);
} else {
serveStatic(app);
}
const preferredPort = parseInt(process.env.PORT || "3000");
const port = await findAvailablePort(preferredPort);
if (port !== preferredPort) {
console.log(`Port ${preferredPort} is busy, using port ${port} instead`);
}
server.listen(port, async () => {
console.log(`Server running on http://localhost:${port}/`);
// Initialisation post-démarrage
// Failures here are logged but deliberately do not kill the server.
try {
await ensureAdminExists();
await scheduleDailyImport();
} catch (e) {
console.error("[Init] Erreur d'initialisation:", e);
}
});
}
// Entry point: unhandled startup errors are logged rather than rethrown.
startServer().catch(console.error);

332
server/_core/llm.ts Normal file
View File

@@ -0,0 +1,332 @@
import { ENV } from "./env";
export type Role = "system" | "user" | "assistant" | "tool" | "function";
export type TextContent = {
type: "text";
text: string;
};
export type ImageContent = {
type: "image_url";
image_url: {
url: string;
detail?: "auto" | "low" | "high";
};
};
export type FileContent = {
type: "file_url";
file_url: {
url: string;
mime_type?: "audio/mpeg" | "audio/wav" | "application/pdf" | "audio/mp4" | "video/mp4" ;
};
};
export type MessageContent = string | TextContent | ImageContent | FileContent;
export type Message = {
role: Role;
content: MessageContent | MessageContent[];
name?: string;
tool_call_id?: string;
};
export type Tool = {
type: "function";
function: {
name: string;
description?: string;
parameters?: Record<string, unknown>;
};
};
export type ToolChoicePrimitive = "none" | "auto" | "required";
export type ToolChoiceByName = { name: string };
export type ToolChoiceExplicit = {
type: "function";
function: {
name: string;
};
};
export type ToolChoice =
| ToolChoicePrimitive
| ToolChoiceByName
| ToolChoiceExplicit;
export type InvokeParams = {
messages: Message[];
tools?: Tool[];
toolChoice?: ToolChoice;
tool_choice?: ToolChoice;
maxTokens?: number;
max_tokens?: number;
outputSchema?: OutputSchema;
output_schema?: OutputSchema;
responseFormat?: ResponseFormat;
response_format?: ResponseFormat;
};
export type ToolCall = {
id: string;
type: "function";
function: {
name: string;
arguments: string;
};
};
export type InvokeResult = {
id: string;
created: number;
model: string;
choices: Array<{
index: number;
message: {
role: Role;
content: string | Array<TextContent | ImageContent | FileContent>;
tool_calls?: ToolCall[];
};
finish_reason: string | null;
}>;
usage?: {
prompt_tokens: number;
completion_tokens: number;
total_tokens: number;
};
};
export type JsonSchema = {
name: string;
schema: Record<string, unknown>;
strict?: boolean;
};
export type OutputSchema = JsonSchema;
export type ResponseFormat =
| { type: "text" }
| { type: "json_object" }
| { type: "json_schema"; json_schema: JsonSchema };
// Wraps a single content part in an array; arrays pass through untouched.
const ensureArray = (
  value: MessageContent | MessageContent[]
): MessageContent[] => {
  if (Array.isArray(value)) {
    return value;
  }
  return [value];
};
// Coerces a message content part into object form: bare strings become text
// parts; recognized object parts are returned unchanged.
const normalizeContentPart = (
  part: MessageContent
): TextContent | ImageContent | FileContent => {
  if (typeof part === "string") {
    return { type: "text", text: part };
  }
  switch (part.type) {
    case "text":
    case "image_url":
    case "file_url":
      return part;
    default:
      throw new Error("Unsupported message content part");
  }
};
// Converts an internal Message to the wire shape. Tool/function messages are
// flattened to one string; other roles keep structured parts, collapsing to a
// plain string when only a single text part is present (compatibility).
const normalizeMessage = (message: Message) => {
  const { role, name, tool_call_id } = message;
  if (role === "tool" || role === "function") {
    const pieces = ensureArray(message.content).map(part =>
      typeof part === "string" ? part : JSON.stringify(part)
    );
    return {
      role,
      name,
      tool_call_id,
      content: pieces.join("\n"),
    };
  }
  const parts = ensureArray(message.content).map(normalizeContentPart);
  if (parts.length === 1 && parts[0].type === "text") {
    return {
      role,
      name,
      content: parts[0].text,
    };
  }
  return {
    role,
    name,
    content: parts,
  };
};
// Translates the flexible ToolChoice input into the strict wire format.
// "required" is rewritten as an explicit single-tool selection, and the
// shorthand { name } form is expanded to the full function object.
const normalizeToolChoice = (
  toolChoice: ToolChoice | undefined,
  tools: Tool[] | undefined
): "none" | "auto" | ToolChoiceExplicit | undefined => {
  if (!toolChoice) return undefined;
  if (toolChoice === "none" || toolChoice === "auto") return toolChoice;
  if (toolChoice === "required") {
    const available = tools ?? [];
    if (available.length === 0) {
      throw new Error(
        "tool_choice 'required' was provided but no tools were configured"
      );
    }
    if (available.length > 1) {
      throw new Error(
        "tool_choice 'required' needs a single tool or specify the tool name explicitly"
      );
    }
    return {
      type: "function",
      function: { name: available[0].function.name },
    };
  }
  if ("name" in toolChoice) {
    return {
      type: "function",
      function: { name: toolChoice.name },
    };
  }
  return toolChoice;
};
// Chat-completions endpoint: the configured Forge base URL (trailing slash
// stripped) or the public default when no base URL is configured.
const resolveApiUrl = () => {
  const configured = ENV.forgeApiUrl;
  if (configured && configured.trim().length > 0) {
    return `${configured.replace(/\/$/, "")}/v1/chat/completions`;
  }
  return "https://forge.manus.im/v1/chat/completions";
};
/**
 * Ensures the Forge API key is present before any LLM call.
 * Fix: the error previously referenced OPENAI_API_KEY, but the value being
 * checked is ENV.forgeApiKey, i.e. BUILT_IN_FORGE_API_KEY — the corrected
 * message matches the sibling helpers (dataApi.ts, imageGen).
 */
const assertApiKey = () => {
  if (!ENV.forgeApiKey) {
    throw new Error("BUILT_IN_FORGE_API_KEY is not configured");
  }
};
// Resolves the response-format options — camelCase or snake_case aliases,
// explicit format or bare output schema — into one response_format value.
// An explicit format wins over an output schema.
const normalizeResponseFormat = ({
  responseFormat,
  response_format,
  outputSchema,
  output_schema,
}: {
  responseFormat?: ResponseFormat;
  response_format?: ResponseFormat;
  outputSchema?: OutputSchema;
  output_schema?: OutputSchema;
}):
  | { type: "json_schema"; json_schema: JsonSchema }
  | { type: "text" }
  | { type: "json_object" }
  | undefined => {
  const explicit = responseFormat ?? response_format;
  if (explicit) {
    if (explicit.type === "json_schema" && !explicit.json_schema?.schema) {
      throw new Error(
        "responseFormat json_schema requires a defined schema object"
      );
    }
    return explicit;
  }
  const schema = outputSchema ?? output_schema;
  if (!schema) return undefined;
  if (!schema.name || !schema.schema) {
    throw new Error("outputSchema requires both name and schema");
  }
  const jsonSchema: JsonSchema = {
    name: schema.name,
    schema: schema.schema,
  };
  if (typeof schema.strict === "boolean") {
    jsonSchema.strict = schema.strict;
  }
  return { type: "json_schema", json_schema: jsonSchema };
};
/**
 * Sends a chat-completion request to the Forge LLM gateway.
 * Accepts both camelCase and snake_case option aliases; messages, tool
 * choice and response format are normalized to the wire format first.
 *
 * @returns The raw completion response from the service.
 * @throws Error when the API key is missing or the HTTP call fails.
 */
export async function invokeLLM(params: InvokeParams): Promise<InvokeResult> {
assertApiKey();
const {
messages,
tools,
toolChoice,
tool_choice,
outputSchema,
output_schema,
responseFormat,
response_format,
} = params;
const payload: Record<string, unknown> = {
// NOTE(review): model is hard-coded; callers cannot override it.
model: "gemini-2.5-flash",
messages: messages.map(normalizeMessage),
};
if (tools && tools.length > 0) {
payload.tools = tools;
}
const normalizedToolChoice = normalizeToolChoice(
toolChoice || tool_choice,
tools
);
if (normalizedToolChoice) {
payload.tool_choice = normalizedToolChoice;
}
// NOTE(review): max_tokens and the thinking budget are fixed here and
// silently ignore the maxTokens/max_tokens fields declared on InvokeParams
// — confirm this override is intended.
payload.max_tokens = 32768
payload.thinking = {
"budget_tokens": 128
}
const normalizedResponseFormat = normalizeResponseFormat({
responseFormat,
response_format,
outputSchema,
output_schema,
});
if (normalizedResponseFormat) {
payload.response_format = normalizedResponseFormat;
}
const response = await fetch(resolveApiUrl(), {
method: "POST",
headers: {
"content-type": "application/json",
authorization: `Bearer ${ENV.forgeApiKey}`,
},
body: JSON.stringify(payload),
});
if (!response.ok) {
const errorText = await response.text();
throw new Error(
`LLM invoke failed: ${response.status} ${response.statusText} ${errorText}`
);
}
return (await response.json()) as InvokeResult;
}

319
server/_core/map.ts Normal file
View File

@@ -0,0 +1,319 @@
/**
* Google Maps API Integration for Manus WebDev Templates
*
* Main function: makeRequest<T>(endpoint, params) - Makes authenticated requests to Google Maps APIs
* All credentials are automatically injected. Array parameters use | as separator.
*
* See API examples below the type definitions for usage patterns.
*/
import { ENV } from "./env";
// ============================================================================
// Configuration
// ============================================================================
type MapsConfig = {
baseUrl: string;
apiKey: string;
};
// Resolves the Maps proxy credentials from the environment, failing fast
// with a descriptive error when either value is missing.
function getMapsConfig(): MapsConfig {
  const { forgeApiUrl: baseUrl, forgeApiKey: apiKey } = ENV;
  if (!baseUrl || !apiKey) {
    throw new Error(
      "Google Maps proxy credentials missing: set BUILT_IN_FORGE_API_URL and BUILT_IN_FORGE_API_KEY"
    );
  }
  // Strip trailing slashes so endpoint paths can be appended directly.
  return {
    baseUrl: baseUrl.replace(/\/+$/, ""),
    apiKey,
  };
}
// ============================================================================
// Core Request Handler
// ============================================================================
interface RequestOptions {
method?: "GET" | "POST";
body?: Record<string, unknown>;
}
/**
* Make authenticated requests to Google Maps APIs
*
* @param endpoint - The API endpoint (e.g., "/maps/api/geocode/json")
* @param params - Query parameters for the request
* @param options - Additional request options
* @returns The API response
*/
/**
 * Make authenticated requests to Google Maps APIs
 *
 * @param endpoint - The API endpoint (e.g. "/maps/api/geocode/json")
 * @param params - Query parameters for the request (null/undefined skipped)
 * @param options - Additional request options (method, JSON body)
 * @returns The parsed API response
 * @throws Error when the upstream call returns a non-2xx status
 */
export async function makeRequest<T = unknown>(
  endpoint: string,
  params: Record<string, unknown> = {},
  options: RequestOptions = {}
): Promise<T> {
  const { baseUrl, apiKey } = getMapsConfig();
  // Full URL: baseUrl + /v1/maps/proxy + endpoint, key appended for auth.
  const url = new URL(`${baseUrl}/v1/maps/proxy${endpoint}`);
  url.searchParams.append("key", apiKey);
  for (const [key, value] of Object.entries(params)) {
    if (value === undefined || value === null) continue;
    url.searchParams.append(key, String(value));
  }
  const response = await fetch(url.toString(), {
    method: options.method ?? "GET",
    headers: {
      "Content-Type": "application/json",
    },
    body: options.body ? JSON.stringify(options.body) : undefined,
  });
  if (response.ok) {
    return (await response.json()) as T;
  }
  const errorText = await response.text();
  throw new Error(
    `Google Maps API request failed (${response.status} ${response.statusText}): ${errorText}`
  );
}
// ============================================================================
// Type Definitions
// ============================================================================
export type TravelMode = "driving" | "walking" | "bicycling" | "transit";
export type MapType = "roadmap" | "satellite" | "terrain" | "hybrid";
export type SpeedUnit = "KPH" | "MPH";
export type LatLng = {
lat: number;
lng: number;
};
export type DirectionsResult = {
routes: Array<{
legs: Array<{
distance: { text: string; value: number };
duration: { text: string; value: number };
start_address: string;
end_address: string;
start_location: LatLng;
end_location: LatLng;
steps: Array<{
distance: { text: string; value: number };
duration: { text: string; value: number };
html_instructions: string;
travel_mode: string;
start_location: LatLng;
end_location: LatLng;
}>;
}>;
overview_polyline: { points: string };
summary: string;
warnings: string[];
waypoint_order: number[];
}>;
status: string;
};
export type DistanceMatrixResult = {
rows: Array<{
elements: Array<{
distance: { text: string; value: number };
duration: { text: string; value: number };
status: string;
}>;
}>;
origin_addresses: string[];
destination_addresses: string[];
status: string;
};
export type GeocodingResult = {
results: Array<{
address_components: Array<{
long_name: string;
short_name: string;
types: string[];
}>;
formatted_address: string;
geometry: {
location: LatLng;
location_type: string;
viewport: {
northeast: LatLng;
southwest: LatLng;
};
};
place_id: string;
types: string[];
}>;
status: string;
};
export type PlacesSearchResult = {
results: Array<{
place_id: string;
name: string;
formatted_address: string;
geometry: {
location: LatLng;
};
rating?: number;
user_ratings_total?: number;
business_status?: string;
types: string[];
}>;
status: string;
};
export type PlaceDetailsResult = {
result: {
place_id: string;
name: string;
formatted_address: string;
formatted_phone_number?: string;
international_phone_number?: string;
website?: string;
rating?: number;
user_ratings_total?: number;
reviews?: Array<{
author_name: string;
rating: number;
text: string;
time: number;
}>;
opening_hours?: {
open_now: boolean;
weekday_text: string[];
};
geometry: {
location: LatLng;
};
};
status: string;
};
export type ElevationResult = {
results: Array<{
elevation: number;
location: LatLng;
resolution: number;
}>;
status: string;
};
export type TimeZoneResult = {
dstOffset: number;
rawOffset: number;
status: string;
timeZoneId: string;
timeZoneName: string;
};
export type RoadsResult = {
snappedPoints: Array<{
location: LatLng;
originalIndex?: number;
placeId: string;
}>;
};
// ============================================================================
// Google Maps API Reference
// ============================================================================
/**
* GEOCODING - Convert between addresses and coordinates
* Endpoint: /maps/api/geocode/json
* Input: { address: string } OR { latlng: string } // latlng: "37.42,-122.08"
* Output: GeocodingResult // results[0].geometry.location, results[0].formatted_address
*/
/**
* DIRECTIONS - Get navigation routes between locations
* Endpoint: /maps/api/directions/json
* Input: { origin: string, destination: string, mode?: TravelMode, waypoints?: string, alternatives?: boolean }
* Output: DirectionsResult // routes[0].legs[0].distance, duration, steps
*/
/**
* DISTANCE MATRIX - Calculate travel times/distances for multiple origin-destination pairs
* Endpoint: /maps/api/distancematrix/json
* Input: { origins: string, destinations: string, mode?: TravelMode, units?: "metric"|"imperial" } // origins: "NYC|Boston"
* Output: DistanceMatrixResult // rows[0].elements[1] = first origin to second destination
*/
/**
* PLACE SEARCH - Find businesses/POIs by text query
* Endpoint: /maps/api/place/textsearch/json
* Input: { query: string, location?: string, radius?: number, type?: string } // location: "40.7,-74.0"
* Output: PlacesSearchResult // results[].name, rating, geometry.location, place_id
*/
/**
* NEARBY SEARCH - Find places near a specific location
* Endpoint: /maps/api/place/nearbysearch/json
* Input: { location: string, radius: number, type?: string, keyword?: string } // location: "40.7,-74.0"
* Output: PlacesSearchResult
*/
/**
* PLACE DETAILS - Get comprehensive information about a specific place
* Endpoint: /maps/api/place/details/json
* Input: { place_id: string, fields?: string } // fields: "name,rating,opening_hours,website"
* Output: PlaceDetailsResult // result.name, rating, opening_hours, etc.
*/
/**
* ELEVATION - Get altitude data for geographic points
* Endpoint: /maps/api/elevation/json
* Input: { locations?: string, path?: string, samples?: number } // locations: "39.73,-104.98|36.45,-116.86"
* Output: ElevationResult // results[].elevation (meters)
*/
/**
* TIME ZONE - Get timezone information for a location
* Endpoint: /maps/api/timezone/json
* Input: { location: string, timestamp: number } // timestamp: Math.floor(Date.now()/1000)
* Output: TimeZoneResult // timeZoneId, timeZoneName
*/
/**
* ROADS - Snap GPS traces to roads, find nearest roads, get speed limits
* - /v1/snapToRoads: Input: { path: string, interpolate?: boolean } // path: "lat,lng|lat,lng"
* - /v1/nearestRoads: Input: { points: string } // points: "lat,lng|lat,lng"
* - /v1/speedLimits: Input: { path: string, units?: SpeedUnit }
* Output: RoadsResult
*/
/**
* PLACE AUTOCOMPLETE - Real-time place suggestions as user types
* Endpoint: /maps/api/place/autocomplete/json
* Input: { input: string, location?: string, radius?: number }
* Output: { predictions: Array<{ description: string, place_id: string }> }
*/
/**
* STATIC MAPS - Generate map images as URLs (for emails, reports, <img> tags)
* Endpoint: /maps/api/staticmap
* Input: URL params - center: string, zoom: number, size: string, markers?: string, maptype?: MapType
* Output: Image URL (not JSON) - use directly in <img src={url} />
* Note: Construct URL manually with getMapsConfig() for auth
*/

View File

@@ -0,0 +1,114 @@
import { TRPCError } from "@trpc/server";
import { ENV } from "./env";
export type NotificationPayload = {
title: string;
content: string;
};
// Upstream limits for notification fields.
const TITLE_MAX_LENGTH = 1200;
const CONTENT_MAX_LENGTH = 20000;
const trimValue = (value: string): string => value.trim();
// Narrowing guard: a string that is non-empty after trimming.
const isNonEmptyString = (value: unknown): value is string =>
  typeof value === "string" && value.trim().length > 0;
// Resolves the SendNotification endpoint against the configured base URL,
// tolerating bases with or without a trailing slash.
const buildEndpointUrl = (baseUrl: string): string => {
  const base = baseUrl.endsWith("/") ? baseUrl : `${baseUrl}/`;
  return new URL(
    "webdevtoken.v1.WebDevService/SendNotification",
    base
  ).toString();
};
// Validates and trims a notification payload, enforcing presence and
// maximum lengths; invalid input raises BAD_REQUEST TRPC errors.
const validatePayload = (input: NotificationPayload): NotificationPayload => {
  const fail = (message: string): never => {
    throw new TRPCError({ code: "BAD_REQUEST", message });
  };
  if (!isNonEmptyString(input.title)) {
    fail("Notification title is required.");
  }
  if (!isNonEmptyString(input.content)) {
    fail("Notification content is required.");
  }
  const title = trimValue(input.title);
  const content = trimValue(input.content);
  if (title.length > TITLE_MAX_LENGTH) {
    fail(`Notification title must be at most ${TITLE_MAX_LENGTH} characters.`);
  }
  if (content.length > CONTENT_MAX_LENGTH) {
    fail(`Notification content must be at most ${CONTENT_MAX_LENGTH} characters.`);
  }
  return { title, content };
};
/**
 * Dispatches a project-owner notification through the Manus Notification Service.
 * Returns `true` if the request was accepted, `false` when the upstream service
 * cannot be reached (callers can fall back to email/slack). Validation errors
 * bubble up as TRPC errors so callers can fix the payload.
 */
export async function notifyOwner(
  payload: NotificationPayload
): Promise<boolean> {
  const { title, content } = validatePayload(payload);
  if (!ENV.forgeApiUrl) {
    throw new TRPCError({
      code: "INTERNAL_SERVER_ERROR",
      message: "Notification service URL is not configured.",
    });
  }
  if (!ENV.forgeApiKey) {
    throw new TRPCError({
      code: "INTERNAL_SERVER_ERROR",
      message: "Notification service API key is not configured.",
    });
  }
  const endpoint = buildEndpointUrl(ENV.forgeApiUrl);
  const requestInit = {
    method: "POST",
    headers: {
      accept: "application/json",
      authorization: `Bearer ${ENV.forgeApiKey}`,
      "content-type": "application/json",
      "connect-protocol-version": "1",
    },
    body: JSON.stringify({ title, content }),
  };
  try {
    const response = await fetch(endpoint, requestInit);
    if (response.ok) {
      return true;
    }
    const detail = await response.text().catch(() => "");
    console.warn(
      `[Notification] Failed to notify owner (${response.status} ${response.statusText})${
        detail ? `: ${detail}` : ""
      }`
    );
    return false;
  } catch (error) {
    console.warn("[Notification] Error calling notification service:", error);
    return false;
  }
}

53
server/_core/oauth.ts Normal file
View File

@@ -0,0 +1,53 @@
import { COOKIE_NAME, ONE_YEAR_MS } from "@shared/const";
import type { Express, Request, Response } from "express";
import * as db from "../db";
import { getSessionCookieOptions } from "./cookies";
import { sdk } from "./sdk";
// Returns a query-string parameter only when it is a single string value;
// array/object values produced by the query parser are treated as absent.
function getQueryParam(req: Request, key: string): string | undefined {
  const raw = req.query[key];
  if (typeof raw === "string") {
    return raw;
  }
  return undefined;
}
/**
 * Mounts the OAuth callback route. On success it upserts the local user
 * record, issues a one-year session JWT cookie, and redirects to the root.
 */
export function registerOAuthRoutes(app: Express) {
app.get("/api/oauth/callback", async (req: Request, res: Response) => {
const code = getQueryParam(req, "code");
const state = getQueryParam(req, "state");
if (!code || !state) {
res.status(400).json({ error: "code and state are required" });
return;
}
try {
// Exchange the authorization code, then fetch the user's profile.
const tokenResponse = await sdk.exchangeCodeForToken(code, state);
const userInfo = await sdk.getUserInfo(tokenResponse.accessToken);
if (!userInfo.openId) {
res.status(400).json({ error: "openId missing from user info" });
return;
}
// Create-or-update the local user row on every successful login.
await db.upsertUser({
openId: userInfo.openId,
name: userInfo.name || null,
email: userInfo.email ?? null,
loginMethod: userInfo.loginMethod ?? userInfo.platform ?? null,
lastSignedIn: new Date(),
});
const sessionToken = await sdk.createSessionToken(userInfo.openId, {
name: userInfo.name || "",
expiresInMs: ONE_YEAR_MS,
});
const cookieOptions = getSessionCookieOptions(req);
res.cookie(COOKIE_NAME, sessionToken, { ...cookieOptions, maxAge: ONE_YEAR_MS });
res.redirect(302, "/");
} catch (error) {
// Deliberately generic response: details are only logged server-side.
console.error("[OAuth] Callback failed", error);
res.status(500).json({ error: "OAuth callback failed" });
}
});
}

304
server/_core/sdk.ts Normal file
View File

@@ -0,0 +1,304 @@
import { AXIOS_TIMEOUT_MS, COOKIE_NAME, ONE_YEAR_MS } from "@shared/const";
import { ForbiddenError } from "@shared/_core/errors";
import axios, { type AxiosInstance } from "axios";
import { parse as parseCookieHeader } from "cookie";
import type { Request } from "express";
import { SignJWT, jwtVerify } from "jose";
import type { User } from "../../drizzle/schema";
import * as db from "../db";
import { ENV } from "./env";
import type {
ExchangeTokenRequest,
ExchangeTokenResponse,
GetUserInfoResponse,
GetUserInfoWithJwtRequest,
GetUserInfoWithJwtResponse,
} from "./types/manusTypes";
// Utility function
const isNonEmptyString = (value: unknown): value is string =>
typeof value === "string" && value.length > 0;
export type SessionPayload = {
openId: string;
appId: string;
name: string;
};
const EXCHANGE_TOKEN_PATH = `/webdev.v1.WebDevAuthPublicService/ExchangeToken`;
const GET_USER_INFO_PATH = `/webdev.v1.WebDevAuthPublicService/GetUserInfo`;
const GET_USER_INFO_WITH_JWT_PATH = `/webdev.v1.WebDevAuthPublicService/GetUserInfoWithJwt`;
/**
 * Client for the Manus OAuth token/user-info endpoints.
 * Logs the configured base URL on construction and loudly warns when
 * OAUTH_SERVER_URL is missing, since every call would then fail.
 */
class OAuthService {
  constructor(private client: ReturnType<typeof axios.create>) {
    console.log("[OAuth] Initialized with baseURL:", ENV.oAuthServerUrl);
    if (!ENV.oAuthServerUrl) {
      console.error(
        "[OAuth] ERROR: OAUTH_SERVER_URL is not configured! Set OAUTH_SERVER_URL environment variable."
      );
    }
  }
  /**
   * The OAuth `state` value carries the base64-encoded redirect URI.
   * Fix: decode with Buffer instead of the deprecated global atob(), which
   * also corrupts any non-Latin-1 characters in the encoded URI.
   */
  private decodeState(state: string): string {
    return Buffer.from(state, "base64").toString("utf-8");
  }
  /** Exchanges an authorization code (+ state-carried redirect URI) for tokens. */
  async getTokenByCode(
    code: string,
    state: string
  ): Promise<ExchangeTokenResponse> {
    const payload: ExchangeTokenRequest = {
      clientId: ENV.appId,
      grantType: "authorization_code",
      code,
      redirectUri: this.decodeState(state),
    };
    const { data } = await this.client.post<ExchangeTokenResponse>(
      EXCHANGE_TOKEN_PATH,
      payload
    );
    return data;
  }
  /** Fetches the user profile associated with an access token. */
  async getUserInfoByToken(
    token: ExchangeTokenResponse
  ): Promise<GetUserInfoResponse> {
    const { data } = await this.client.post<GetUserInfoResponse>(
      GET_USER_INFO_PATH,
      {
        accessToken: token.accessToken,
      }
    );
    return data;
  }
}
// Builds the preconfigured axios instance pointed at the OAuth server.
const createOAuthHttpClient = (): AxiosInstance => {
  return axios.create({
    baseURL: ENV.oAuthServerUrl,
    timeout: AXIOS_TIMEOUT_MS,
  });
};
class SDKServer {
private readonly client: AxiosInstance;
private readonly oauthService: OAuthService;
constructor(client: AxiosInstance = createOAuthHttpClient()) {
this.client = client;
this.oauthService = new OAuthService(this.client);
}
/**
 * Maps the platform list returned by the auth service to a canonical
 * login-method string ("email", "google", ...). An explicit non-empty
 * fallback wins; otherwise well-known platforms are checked in priority
 * order, and any unrecognized platform is lower-cased as a last resort.
 */
private deriveLoginMethod(
platforms: unknown,
fallback: string | null | undefined
): string | null {
if (fallback && fallback.length > 0) return fallback;
if (!Array.isArray(platforms) || platforms.length === 0) return null;
// Keep only string entries; Set preserves insertion order.
const set = new Set<string>(
platforms.filter((p): p is string => typeof p === "string")
);
if (set.has("REGISTERED_PLATFORM_EMAIL")) return "email";
if (set.has("REGISTERED_PLATFORM_GOOGLE")) return "google";
if (set.has("REGISTERED_PLATFORM_APPLE")) return "apple";
if (
set.has("REGISTERED_PLATFORM_MICROSOFT") ||
set.has("REGISTERED_PLATFORM_AZURE")
)
return "microsoft";
if (set.has("REGISTERED_PLATFORM_GITHUB")) return "github";
// Fallback: first string platform, lower-cased.
const first = Array.from(set)[0];
return first ? first.toLowerCase() : null;
}
/**
* Exchange OAuth authorization code for access token
* @example
* const tokenResponse = await sdk.exchangeCodeForToken(code, state);
*/
async exchangeCodeForToken(
code: string,
state: string
): Promise<ExchangeTokenResponse> {
return this.oauthService.getTokenByCode(code, state);
}
/**
* Get user information using access token
* @example
* const userInfo = await sdk.getUserInfo(tokenResponse.accessToken);
*/
async getUserInfo(accessToken: string): Promise<GetUserInfoResponse> {
const data = await this.oauthService.getUserInfoByToken({
accessToken,
} as ExchangeTokenResponse);
const loginMethod = this.deriveLoginMethod(
(data as any)?.platforms,
(data as any)?.platform ?? data.platform ?? null
);
return {
...(data as any),
platform: loginMethod,
loginMethod,
} as GetUserInfoResponse;
}
private parseCookies(cookieHeader: string | undefined) {
if (!cookieHeader) {
return new Map<string, string>();
}
const parsed = parseCookieHeader(cookieHeader);
return new Map(Object.entries(parsed));
}
private getSessionSecret() {
const secret = ENV.cookieSecret;
return new TextEncoder().encode(secret);
}
/**
* Create a session token for a Manus user openId
* @example
* const sessionToken = await sdk.createSessionToken(userInfo.openId);
*/
async createSessionToken(
openId: string,
options: { expiresInMs?: number; name?: string } = {}
): Promise<string> {
return this.signSession(
{
openId,
appId: ENV.appId,
name: options.name || "",
},
options
);
}
async signSession(
payload: SessionPayload,
options: { expiresInMs?: number } = {}
): Promise<string> {
const issuedAt = Date.now();
const expiresInMs = options.expiresInMs ?? ONE_YEAR_MS;
const expirationSeconds = Math.floor((issuedAt + expiresInMs) / 1000);
const secretKey = this.getSessionSecret();
return new SignJWT({
openId: payload.openId,
appId: payload.appId,
name: payload.name,
})
.setProtectedHeader({ alg: "HS256", typ: "JWT" })
.setExpirationTime(expirationSeconds)
.sign(secretKey);
}
async verifySession(
cookieValue: string | undefined | null
): Promise<{ openId: string; appId: string; name: string } | null> {
if (!cookieValue) {
console.warn("[Auth] Missing session cookie");
return null;
}
try {
const secretKey = this.getSessionSecret();
const { payload } = await jwtVerify(cookieValue, secretKey, {
algorithms: ["HS256"],
});
const { openId, appId, name } = payload as Record<string, unknown>;
if (
!isNonEmptyString(openId) ||
!isNonEmptyString(appId) ||
!isNonEmptyString(name)
) {
console.warn("[Auth] Session payload missing required fields");
return null;
}
return {
openId,
appId,
name,
};
} catch (error) {
console.warn("[Auth] Session verification failed", String(error));
return null;
}
}
async getUserInfoWithJwt(
jwtToken: string
): Promise<GetUserInfoWithJwtResponse> {
const payload: GetUserInfoWithJwtRequest = {
jwtToken,
projectId: ENV.appId,
};
const { data } = await this.client.post<GetUserInfoWithJwtResponse>(
GET_USER_INFO_WITH_JWT_PATH,
payload
);
const loginMethod = this.deriveLoginMethod(
(data as any)?.platforms,
(data as any)?.platform ?? data.platform ?? null
);
return {
...(data as any),
platform: loginMethod,
loginMethod,
} as GetUserInfoWithJwtResponse;
}
async authenticateRequest(req: Request): Promise<User> {
// Regular authentication flow
const cookies = this.parseCookies(req.headers.cookie);
const sessionCookie = cookies.get(COOKIE_NAME);
const session = await this.verifySession(sessionCookie);
if (!session) {
throw ForbiddenError("Invalid session cookie");
}
const sessionUserId = session.openId;
const signedInAt = new Date();
let user = await db.getUserByOpenId(sessionUserId);
// If user not in DB, sync from OAuth server automatically
if (!user) {
try {
const userInfo = await this.getUserInfoWithJwt(sessionCookie ?? "");
await db.upsertUser({
openId: userInfo.openId,
name: userInfo.name || null,
email: userInfo.email ?? null,
loginMethod: userInfo.loginMethod ?? userInfo.platform ?? null,
lastSignedIn: signedInAt,
});
user = await db.getUserByOpenId(userInfo.openId);
} catch (error) {
console.error("[Auth] Failed to sync user from OAuth:", error);
throw ForbiddenError("Failed to sync user info");
}
}
if (!user) {
throw ForbiddenError("User not found");
}
await db.upsertUser({
openId: user.openId,
lastSignedIn: signedInAt,
});
return user;
}
}
export const sdk = new SDKServer();

View File

@@ -0,0 +1,29 @@
import { z } from "zod";
import { notifyOwner } from "./notification";
import { adminProcedure, publicProcedure, router } from "./trpc";
// Input schema for the public health probe.
const healthInput = z.object({
  timestamp: z.number().min(0, "timestamp cannot be negative"),
});

// Input schema for the admin-only owner notification.
const notifyOwnerInput = z.object({
  title: z.string().min(1, "title is required"),
  content: z.string().min(1, "content is required"),
});

/**
 * System-level endpoints: a public liveness probe and an admin-only
 * trigger that forwards a message to the app owner.
 */
export const systemRouter = router({
  // The timestamp is validated but otherwise unused by the probe.
  health: publicProcedure.input(healthInput).query(() => ({ ok: true })),
  notifyOwner: adminProcedure
    .input(notifyOwnerInput)
    .mutation(async ({ input }) => {
      const delivered = await notifyOwner(input);
      return { success: delivered } as const;
    }),
});

45
server/_core/trpc.ts Normal file
View File

@@ -0,0 +1,45 @@
import { NOT_ADMIN_ERR_MSG, UNAUTHED_ERR_MSG } from '@shared/const';
import { initTRPC, TRPCError } from "@trpc/server";
import superjson from "superjson";
import type { TrpcContext } from "./context";
// Single tRPC instance for the app; superjson handles Date/Map/etc. over the wire.
const t = initTRPC.context<TrpcContext>().create({ transformer: superjson });

export const router = t.router;
export const publicProcedure = t.procedure;

/** Reject anonymous callers and narrow `ctx.user` to non-null downstream. */
const requireUser = t.middleware(async opts => {
  if (!opts.ctx.user) {
    throw new TRPCError({ code: "UNAUTHORIZED", message: UNAUTHED_ERR_MSG });
  }
  return opts.next({
    ctx: {
      ...opts.ctx,
      user: opts.ctx.user,
    },
  });
});

export const protectedProcedure = t.procedure.use(requireUser);

/** Reject callers who are not signed in with the admin role. */
const requireAdmin = t.middleware(async opts => {
  const { ctx } = opts;
  if (!ctx.user || ctx.user.role !== 'admin') {
    throw new TRPCError({ code: "FORBIDDEN", message: NOT_ADMIN_ERR_MSG });
  }
  return opts.next({
    ctx: {
      ...ctx,
      user: ctx.user,
    },
  });
});

export const adminProcedure = t.procedure.use(requireAdmin);

6
server/_core/types/cookie.d.ts vendored Normal file
View File

@@ -0,0 +1,6 @@
/**
 * Minimal ambient typing for the `cookie` package: only the `parse` helper
 * used by this server is declared.
 */
declare module "cookie" {
  /** Parse a Cookie header string into a name -> value record. */
  export function parse(
    str: string,
    options?: Record<string, unknown>
  ): Record<string, string>;
}

View File

@@ -0,0 +1,69 @@
// WebDev Auth TypeScript types
// Auto-generated from protobuf definitions
// Generated on: 2025-09-24T05:57:57.338Z
/** Request to start the OAuth authorization flow. */
export interface AuthorizeRequest {
  redirectUri: string;
  projectId: string;
  state: string;
  responseType: string;
  scope: string;
}
/** Where the browser should be sent to continue the flow. */
export interface AuthorizeResponse {
  redirectUrl: string;
}
/** Code-or-refresh-token exchange request (`grantType` selects which). */
export interface ExchangeTokenRequest {
  grantType: string;
  code: string;
  refreshToken?: string;
  clientId: string;
  clientSecret?: string;
  redirectUri: string;
}
/** Token set returned by the exchange endpoint. */
export interface ExchangeTokenResponse {
  accessToken: string;
  tokenType: string;
  expiresIn: number;
  refreshToken?: string;
  scope: string;
  idToken: string;
}
/** Lookup of a user profile by access token. */
export interface GetUserInfoRequest {
  accessToken: string;
}
/** User profile; platform/loginMethod are normalized by the SDK after fetch. */
export interface GetUserInfoResponse {
  openId: string;
  projectId: string;
  name: string;
  email?: string | null;
  platform?: string | null;
  loginMethod?: string | null;
}
/** Access-control check for a user within a project. */
export interface CanAccessRequest {
  openId: string;
  projectId: string;
}
export interface CanAccessResponse {
  canAccess: boolean;
}
/** Lookup of a user profile by a JWT instead of an access token. */
export interface GetUserInfoWithJwtRequest {
  jwtToken: string;
  projectId: string;
}
/** Same shape as GetUserInfoResponse, returned for JWT-based lookups. */
export interface GetUserInfoWithJwtResponse {
  openId: string;
  projectId: string;
  name: string;
  email?: string | null;
  platform?: string | null;
  loginMethod?: string | null;
}

67
server/_core/vite.ts Normal file
View File

@@ -0,0 +1,67 @@
import express, { type Express } from "express";
import fs from "fs";
import { type Server } from "http";
import { nanoid } from "nanoid";
import path from "path";
import { createServer as createViteServer } from "vite";
import viteConfig from "../../vite.config";
/**
 * Attach Vite's dev middleware to the Express app and serve index.html
 * through Vite's HTML transform for every remaining route (dev mode).
 */
export async function setupVite(app: Express, server: Server) {
  const serverOptions = {
    middlewareMode: true,
    hmr: { server }, // reuse the existing HTTP server for HMR websockets
    allowedHosts: true as const,
  };
  const vite = await createViteServer({
    ...viteConfig,
    configFile: false, // config is passed inline above; don't re-read vite.config
    server: serverOptions,
    appType: "custom",
  });
  app.use(vite.middlewares);
  // Catch-all: anything Vite's middleware didn't serve gets the SPA shell.
  app.use("*", async (req, res, next) => {
    const url = req.originalUrl;
    try {
      const clientTemplate = path.resolve(
        import.meta.dirname,
        "../..",
        "client",
        "index.html"
      );
      // always reload the index.html file from disk incase it changes
      let template = await fs.promises.readFile(clientTemplate, "utf-8");
      // Cache-bust the entry script so the browser refetches it after edits.
      template = template.replace(
        `src="/src/main.tsx"`,
        `src="/src/main.tsx?v=${nanoid()}"`
      );
      const page = await vite.transformIndexHtml(url, template);
      res.status(200).set({ "Content-Type": "text/html" }).end(page);
    } catch (e) {
      // Rewrite stack traces to point at original sources before delegating
      // to the Express error handler.
      vite.ssrFixStacktrace(e as Error);
      next(e);
    }
  });
}
/**
 * Serve the pre-built client bundle and fall back to index.html for any
 * unknown path (SPA routing). Logs (but does not throw) if the build
 * directory is missing.
 */
export function serveStatic(app: Express) {
  const isDevBuild = process.env.NODE_ENV === "development";
  // In dev the bundle lives under the repo's dist/public; in the deployed
  // build it sits next to the compiled server code.
  const distPath = isDevBuild
    ? path.resolve(import.meta.dirname, "../..", "dist", "public")
    : path.resolve(import.meta.dirname, "public");
  if (!fs.existsSync(distPath)) {
    console.error(
      `Could not find the build directory: ${distPath}, make sure to build the client first`
    );
  }
  app.use(express.static(distPath));
  // fall through to index.html if the file doesn't exist
  const indexHtml = path.resolve(distPath, "index.html");
  app.use("*", (_req, res) => {
    res.sendFile(indexHtml);
  });
}

View File

@@ -0,0 +1,284 @@
/**
* Voice transcription helper using internal Speech-to-Text service
*
* Frontend implementation guide:
* 1. Capture audio using MediaRecorder API
* 2. Upload audio to storage (e.g., S3) to get URL
* 3. Call transcription with the URL
*
* Example usage:
* ```tsx
* // Frontend component
* const transcribeMutation = trpc.voice.transcribe.useMutation({
* onSuccess: (data) => {
* console.log(data.text); // Full transcription
* console.log(data.language); // Detected language
* console.log(data.segments); // Timestamped segments
* }
* });
*
* // After uploading audio to storage
* transcribeMutation.mutate({
* audioUrl: uploadedAudioUrl,
* language: 'en', // optional
* prompt: 'Transcribe the meeting' // optional
* });
* ```
*/
import { ENV } from "./env";
/** Input for `transcribeAudio`. */
export type TranscribeOptions = {
  audioUrl: string; // URL to the audio file (e.g., S3 URL)
  language?: string; // Optional: specify language code (e.g., "en", "es", "zh")
  prompt?: string; // Optional: custom prompt for the transcription
};
// Native Whisper API segment format
export type WhisperSegment = {
  id: number;
  seek: number;
  start: number;
  end: number;
  text: string;
  tokens: number[];
  temperature: number;
  avg_logprob: number;
  compression_ratio: number;
  no_speech_prob: number;
};
// Native Whisper API response format
export type WhisperResponse = {
  task: "transcribe";
  language: string;
  duration: number;
  text: string; // full transcription text
  segments: WhisperSegment[];
};
export type TranscriptionResponse = WhisperResponse; // Return native Whisper API response directly
// Structured failure value; transcribeAudio RETURNS this instead of throwing.
export type TranscriptionError = {
  error: string;
  code: "FILE_TOO_LARGE" | "INVALID_FORMAT" | "TRANSCRIPTION_FAILED" | "UPLOAD_FAILED" | "SERVICE_ERROR";
  details?: string;
};
/**
 * Transcribe audio to text using the internal Speech-to-Text service.
 *
 * Downloads the audio from `options.audioUrl`, re-uploads it as multipart
 * form data to the Whisper-compatible endpoint, and returns the native
 * verbose-JSON response. All failures are returned as TranscriptionError
 * values — this function does not throw.
 *
 * @param options - Audio data and metadata
 * @returns Transcription result or error
 */
export async function transcribeAudio(
  options: TranscribeOptions
): Promise<TranscriptionResponse | TranscriptionError> {
  try {
    // Step 1: Validate environment configuration
    if (!ENV.forgeApiUrl) {
      return {
        error: "Voice transcription service is not configured",
        code: "SERVICE_ERROR",
        details: "BUILT_IN_FORGE_API_URL is not set"
      };
    }
    if (!ENV.forgeApiKey) {
      return {
        error: "Voice transcription service authentication is missing",
        code: "SERVICE_ERROR",
        details: "BUILT_IN_FORGE_API_KEY is not set"
      };
    }
    // Step 2: Download audio from URL
    let audioBuffer: Buffer;
    let mimeType: string;
    try {
      const response = await fetch(options.audioUrl);
      if (!response.ok) {
        return {
          error: "Failed to download audio file",
          code: "INVALID_FORMAT",
          details: `HTTP ${response.status}: ${response.statusText}`
        };
      }
      audioBuffer = Buffer.from(await response.arrayBuffer());
      mimeType = response.headers.get('content-type') || 'audio/mpeg';
      // Check file size (16MB limit)
      // NOTE(review): the whole file is buffered before this check; a
      // Content-Length pre-check could reject oversized files earlier.
      const sizeMB = audioBuffer.length / (1024 * 1024);
      if (sizeMB > 16) {
        return {
          error: "Audio file exceeds maximum size limit",
          code: "FILE_TOO_LARGE",
          details: `File size is ${sizeMB.toFixed(2)}MB, maximum allowed is 16MB`
        };
      }
    } catch (error) {
      return {
        error: "Failed to fetch audio file",
        code: "SERVICE_ERROR",
        details: error instanceof Error ? error.message : "Unknown error"
      };
    }
    // Step 3: Create FormData for multipart upload to Whisper API
    const formData = new FormData();
    // Create a Blob from the buffer and append to form
    const filename = `audio.${getFileExtension(mimeType)}`;
    const audioBlob = new Blob([new Uint8Array(audioBuffer)], { type: mimeType });
    formData.append("file", audioBlob, filename);
    formData.append("model", "whisper-1");
    formData.append("response_format", "verbose_json");
    // Add prompt - use custom prompt if provided, otherwise generate based on language
    const prompt = options.prompt || (
      options.language
        ? `Transcribe the user's voice to text, the user's working language is ${getLanguageName(options.language)}`
        : "Transcribe the user's voice to text"
    );
    formData.append("prompt", prompt);
    // Step 4: Call the transcription service
    // Normalize the base URL to a trailing slash so URL resolution keeps its path.
    const baseUrl = ENV.forgeApiUrl.endsWith("/")
      ? ENV.forgeApiUrl
      : `${ENV.forgeApiUrl}/`;
    const fullUrl = new URL(
      "v1/audio/transcriptions",
      baseUrl
    ).toString();
    const response = await fetch(fullUrl, {
      method: "POST",
      headers: {
        authorization: `Bearer ${ENV.forgeApiKey}`,
        "Accept-Encoding": "identity",
      },
      body: formData,
    });
    if (!response.ok) {
      const errorText = await response.text().catch(() => "");
      return {
        error: "Transcription service request failed",
        code: "TRANSCRIPTION_FAILED",
        details: `${response.status} ${response.statusText}${errorText ? `: ${errorText}` : ""}`
      };
    }
    // Step 5: Parse and return the transcription result
    const whisperResponse = await response.json() as WhisperResponse;
    // Validate response structure
    if (!whisperResponse.text || typeof whisperResponse.text !== 'string') {
      return {
        error: "Invalid transcription response",
        code: "SERVICE_ERROR",
        details: "Transcription service returned an invalid response format"
      };
    }
    return whisperResponse; // Return native Whisper API response directly
  } catch (error) {
    // Handle unexpected errors
    return {
      error: "Voice transcription failed",
      code: "SERVICE_ERROR",
      details: error instanceof Error ? error.message : "An unexpected error occurred"
    };
  }
}
/**
* Helper function to get file extension from MIME type
*/
/**
 * Map an audio MIME type to the file extension used for the uploaded
 * filename. Unknown types fall back to the generic "audio" extension.
 */
function getFileExtension(mimeType: string): string {
  switch (mimeType) {
    case 'audio/webm':
      return 'webm';
    case 'audio/mp3':
    case 'audio/mpeg':
      return 'mp3';
    case 'audio/wav':
    case 'audio/wave':
      return 'wav';
    case 'audio/ogg':
      return 'ogg';
    case 'audio/m4a':
    case 'audio/mp4':
      return 'm4a';
    default:
      return 'audio';
  }
}
/**
* Helper function to get full language name from ISO code
*/
/**
 * Resolve an ISO 639-1 code to the English language name used in the
 * transcription prompt; unknown codes pass through unchanged.
 */
function getLanguageName(langCode: string): string {
  const names = new Map<string, string>([
    ['en', 'English'],
    ['es', 'Spanish'],
    ['fr', 'French'],
    ['de', 'German'],
    ['it', 'Italian'],
    ['pt', 'Portuguese'],
    ['ru', 'Russian'],
    ['ja', 'Japanese'],
    ['ko', 'Korean'],
    ['zh', 'Chinese'],
    ['ar', 'Arabic'],
    ['hi', 'Hindi'],
    ['nl', 'Dutch'],
    ['pl', 'Polish'],
    ['tr', 'Turkish'],
    ['sv', 'Swedish'],
    ['da', 'Danish'],
    ['no', 'Norwegian'],
    ['fi', 'Finnish'],
  ]);
  return names.get(langCode) ?? langCode;
}
/**
* Example tRPC procedure implementation:
*
* ```ts
* // In server/routers.ts
* import { transcribeAudio } from "./_core/voiceTranscription";
*
* export const voiceRouter = router({
* transcribe: protectedProcedure
* .input(z.object({
* audioUrl: z.string(),
* language: z.string().optional(),
* prompt: z.string().optional(),
* }))
* .mutation(async ({ input, ctx }) => {
* const result = await transcribeAudio(input);
*
* // Check if it's an error
* if ('error' in result) {
* throw new TRPCError({
* code: 'BAD_REQUEST',
* message: result.error,
* cause: result,
* });
* }
*
* // Optionally save transcription to database
* await db.insert(transcriptions).values({
* userId: ctx.user.id,
* text: result.text,
* duration: result.duration,
* language: result.language,
* audioUrl: input.audioUrl,
* createdAt: new Date(),
* });
*
* return result;
* }),
* });
* ```
*/

View File

@@ -0,0 +1,62 @@
import { describe, expect, it } from "vitest";
import { appRouter } from "./routers";
import { COOKIE_NAME } from "../shared/const";
import type { TrpcContext } from "./_core/context";
/** One recorded res.clearCookie invocation. */
type CookieCall = {
  name: string;
  options: Record<string, unknown>;
};
type AuthenticatedUser = NonNullable<TrpcContext["user"]>;
/**
 * Build a fake authenticated tRPC context for router tests. Every cookie
 * cleared through `res.clearCookie` is captured in `clearedCookies`.
 */
function createAuthContext(): { ctx: TrpcContext; clearedCookies: CookieCall[] } {
  const clearedCookies: CookieCall[] = [];
  const req = {
    protocol: "https",
    headers: {},
  } as TrpcContext["req"];
  const res = {
    clearCookie: (name: string, options: Record<string, unknown>) => {
      clearedCookies.push({ name, options });
    },
  } as TrpcContext["res"];
  const user: AuthenticatedUser = {
    id: 1,
    openId: "sample-user",
    email: "sample@example.com",
    name: "Sample User",
    loginMethod: "manus",
    role: "user",
    createdAt: new Date(),
    updatedAt: new Date(),
    lastSignedIn: new Date(),
  };
  return { ctx: { user, req, res }, clearedCookies };
}
// auth.logout must clear exactly one cookie — the session cookie — and
// report success to the caller.
describe("auth.logout", () => {
  it("clears the session cookie and reports success", async () => {
    const { ctx, clearedCookies } = createAuthContext();
    const caller = appRouter.createCaller(ctx);
    const result = await caller.auth.logout();
    expect(result).toEqual({ success: true });
    expect(clearedCookies).toHaveLength(1);
    expect(clearedCookies[0]?.name).toBe(COOKIE_NAME);
    // Pin the clear options: presumably these must match the attributes the
    // cookie was originally set with, or browsers won't remove it.
    expect(clearedCookies[0]?.options).toMatchObject({
      maxAge: -1,
      secure: true,
      sameSite: "none",
      httpOnly: true,
      path: "/",
    });
  });
});

317
server/db.ts Normal file
View File

@@ -0,0 +1,317 @@
import { eq, desc, and, like, gte, lte, or, sql } from "drizzle-orm";
import { drizzle } from "drizzle-orm/mysql2";
import {
InsertUser,
users,
localUsers,
veilleItems,
aapItems,
appSettings,
importLogs,
InsertLocalUser,
} from "../drizzle/schema";
import { ENV } from "./_core/env";
// Lazily-initialized drizzle handle; null until first successful connect.
let _db: ReturnType<typeof drizzle> | null = null;
/**
 * Create (once) and return the drizzle connection. Returns null when
 * DATABASE_URL is unset or the connection fails, so callers can degrade
 * gracefully instead of crashing. A failed attempt is retried on the next
 * call because _db stays null.
 */
export async function getDb() {
  if (!_db && process.env.DATABASE_URL) {
    try {
      _db = drizzle(process.env.DATABASE_URL);
    } catch (error) {
      console.warn("[Database] Failed to connect:", error);
      _db = null;
    }
  }
  return _db;
}
// ─── Users (Manus OAuth) ─────────────────────────────────────────────────────
/**
 * Insert or update a Manus user keyed by openId.
 *
 * Only fields explicitly provided are written (undefined leaves the existing
 * column untouched on update; explicit null clears it). The configured owner
 * openId is auto-promoted to the admin role. lastSignedIn defaults to "now"
 * on insert and serves as a fallback update column so the upsert's SET
 * clause is never empty.
 */
export async function upsertUser(user: InsertUser): Promise<void> {
  if (!user.openId) throw new Error("User openId is required for upsert");
  const db = await getDb();
  // Best-effort: warn and skip when the DB is unavailable.
  if (!db) { console.warn("[Database] Cannot upsert user: database not available"); return; }
  const values: InsertUser = { openId: user.openId };
  const updateSet: Record<string, unknown> = {};
  const textFields = ["name", "email", "loginMethod"] as const;
  for (const field of textFields) {
    const value = user[field];
    if (value === undefined) continue; // not provided: keep existing value
    const normalized = value ?? null; // explicit null clears the column
    values[field] = normalized;
    updateSet[field] = normalized;
  }
  if (user.lastSignedIn !== undefined) { values.lastSignedIn = user.lastSignedIn; updateSet.lastSignedIn = user.lastSignedIn; }
  if (user.role !== undefined) { values.role = user.role; updateSet.role = user.role; }
  // Owner account is always given the admin role when no role is specified.
  else if (user.openId === ENV.ownerOpenId) { values.role = "admin"; updateSet.role = "admin"; }
  if (!values.lastSignedIn) values.lastSignedIn = new Date();
  // MySQL's ON DUPLICATE KEY UPDATE requires at least one SET column.
  if (Object.keys(updateSet).length === 0) updateSet.lastSignedIn = new Date();
  await db.insert(users).values(values).onDuplicateKeyUpdate({ set: updateSet });
}
/** Look up a Manus user by openId; undefined when absent or the DB is down. */
export async function getUserByOpenId(openId: string) {
  const db = await getDb();
  if (!db) return undefined;
  const [row] = await db
    .select()
    .from(users)
    .where(eq(users.openId, openId))
    .limit(1);
  return row;
}
// ─── Local Users ─────────────────────────────────────────────────────────────
/**
 * List local-auth users, newest first. Only the fixed column projection
 * below is returned (note: the password/credential columns are not selected).
 * Returns an empty array when the DB is unavailable.
 */
export async function getLocalUsers() {
  const db = await getDb();
  if (!db) return [];
  return db
    .select({
      id: localUsers.id,
      name: localUsers.name,
      email: localUsers.email,
      role: localUsers.role,
      isActive: localUsers.isActive,
      createdAt: localUsers.createdAt,
      lastSignedIn: localUsers.lastSignedIn,
    })
    .from(localUsers)
    .orderBy(desc(localUsers.createdAt));
}
/** Insert a new local-auth user row; throws when the DB is unavailable. */
export async function createLocalUser(data: Omit<InsertLocalUser, "id" | "createdAt" | "updatedAt">) {
  const database = await getDb();
  if (!database) throw new Error("DB unavailable");
  await database.insert(localUsers).values(data);
}
/** Partially update a local-auth user by id; throws when the DB is unavailable. */
export async function updateLocalUser(id: number, data: Partial<InsertLocalUser>) {
  const database = await getDb();
  if (!database) throw new Error("DB unavailable");
  await database.update(localUsers).set(data).where(eq(localUsers.id, id));
}
/** Delete a local-auth user by id; throws when the DB is unavailable. */
export async function deleteLocalUser(id: number) {
  const database = await getDb();
  if (!database) throw new Error("DB unavailable");
  await database.delete(localUsers).where(eq(localUsers.id, id));
}
// ─── Veille Items ─────────────────────────────────────────────────────────────
/** Filtering/paging options for getVeilleItems; all fields optional. */
export interface VeilleFilters {
  typeVeille?: string; // exact enum match
  categorie?: string; // substring match
  niveau?: string; // substring match
  territoire?: string; // substring match
  search?: string; // substring match over titre and resume
  dateFrom?: Date; // inclusive lower bound on datePublication
  dateTo?: Date; // inclusive upper bound on datePublication
  page?: number; // 1-based, defaults to 1
  pageSize?: number; // defaults to 50
}
/**
 * Paged, filtered listing of veille items plus the total count matching the
 * same filters. Ordered by publication date then import time, newest first.
 * Returns { items: [], total: 0 } when the DB is unavailable.
 */
export async function getVeilleItems(filters: VeilleFilters = {}) {
  const db = await getDb();
  if (!db) return { items: [], total: 0 };
  const { page = 1, pageSize = 50, ...f } = filters;
  const offset = (page - 1) * pageSize;
  const conditions = [];
  // typeVeille is an exact match; the cast narrows the free-form filter
  // string to the column's enum union for drizzle's typing.
  if (f.typeVeille) conditions.push(eq(veilleItems.typeVeille, f.typeVeille as "reglementaire" | "concurrentielle" | "technologique" | "generale"));
  if (f.categorie) conditions.push(like(veilleItems.categorie, `%${f.categorie}%`));
  if (f.niveau) conditions.push(like(veilleItems.niveau, `%${f.niveau}%`));
  if (f.territoire) conditions.push(like(veilleItems.territoire, `%${f.territoire}%`));
  // Free-text search spans title and summary.
  if (f.search) {
    conditions.push(
      or(
        like(veilleItems.titre, `%${f.search}%`),
        like(veilleItems.resume, `%${f.search}%`)
      )
    );
  }
  if (f.dateFrom) conditions.push(gte(veilleItems.datePublication, f.dateFrom));
  if (f.dateTo) conditions.push(lte(veilleItems.datePublication, f.dateTo));
  const where = conditions.length > 0 ? and(...conditions) : undefined;
  // Page query and count query are independent; run them concurrently.
  const [items, countResult] = await Promise.all([
    db
      .select()
      .from(veilleItems)
      .where(where)
      .orderBy(desc(veilleItems.datePublication), desc(veilleItems.importedAt))
      .limit(pageSize)
      .offset(offset),
    db
      .select({ count: sql<number>`count(*)` })
      .from(veilleItems)
      .where(where),
  ]);
  return { items, total: Number(countResult[0]?.count ?? 0) };
}
/**
 * Distinct non-empty categorie/niveau/territoire values, each sorted
 * alphabetically — used to populate filter dropdowns. Empty lists when the
 * DB is unavailable.
 */
export async function getVeilleDistinctValues() {
  const db = await getDb();
  if (!db) return { categories: [], niveaux: [], territoires: [] };
  const [cats, niveaux, territoires] = await Promise.all([
    db.selectDistinct({ value: veilleItems.categorie }).from(veilleItems).where(sql`${veilleItems.categorie} IS NOT NULL`),
    db.selectDistinct({ value: veilleItems.niveau }).from(veilleItems).where(sql`${veilleItems.niveau} IS NOT NULL`),
    db.selectDistinct({ value: veilleItems.territoire }).from(veilleItems).where(sql`${veilleItems.territoire} IS NOT NULL`),
  ]);
  // Drop null/empty values and sort the remainder.
  const clean = (rows: Array<{ value: string | null }>) =>
    rows.flatMap((row) => (row.value ? [row.value] : [])).sort();
  return {
    categories: clean(cats),
    niveaux: clean(niveaux),
    territoires: clean(territoires),
  };
}
// ─── AAP Items ────────────────────────────────────────────────────────────────
/** Filtering/paging options for getAapItems; all fields optional. */
export interface AapFilters {
  categorie?: string; // exact enum match
  region?: string; // substring match
  departement?: string; // substring match
  search?: string; // substring match on titre
  dateFrom?: Date; // inclusive bounds on datePublication
  dateTo?: Date;
  clotureFrom?: Date; // inclusive bounds on dateCloture
  clotureTo?: Date;
  page?: number; // 1-based, defaults to 1
  pageSize?: number; // defaults to 50
}
/**
 * Paged, filtered listing of AAP items plus the total count matching the
 * same filters. Ordered by publication date then import time, newest first.
 * Returns { items: [], total: 0 } when the DB is unavailable.
 */
export async function getAapItems(filters: AapFilters = {}) {
  const db = await getDb();
  if (!db) return { items: [], total: 0 };
  const { page = 1, pageSize = 50, ...f } = filters;
  const offset = (page - 1) * pageSize;
  const conditions = [];
  // categorie is an exact match; the cast narrows the filter string to the
  // column's enum union for drizzle's typing.
  if (f.categorie) conditions.push(eq(aapItems.categorie, f.categorie as "Handicap" | "PA" | "Enfance" | "Précarité" | "Sanitaire" | "Autre"));
  if (f.region) conditions.push(like(aapItems.region, `%${f.region}%`));
  if (f.departement) conditions.push(like(aapItems.departement, `%${f.departement}%`));
  if (f.search) conditions.push(like(aapItems.titre, `%${f.search}%`));
  if (f.dateFrom) conditions.push(gte(aapItems.datePublication, f.dateFrom));
  if (f.dateTo) conditions.push(lte(aapItems.datePublication, f.dateTo));
  if (f.clotureFrom) conditions.push(gte(aapItems.dateCloture, f.clotureFrom));
  if (f.clotureTo) conditions.push(lte(aapItems.dateCloture, f.clotureTo));
  const where = conditions.length > 0 ? and(...conditions) : undefined;
  // Page query and count query are independent; run them concurrently.
  const [items, countResult] = await Promise.all([
    db
      .select()
      .from(aapItems)
      .where(where)
      .orderBy(desc(aapItems.datePublication), desc(aapItems.importedAt))
      .limit(pageSize)
      .offset(offset),
    db
      .select({ count: sql<number>`count(*)` })
      .from(aapItems)
      .where(where),
  ]);
  return { items, total: Number(countResult[0]?.count ?? 0) };
}
/**
 * Distinct non-empty region/departement values, sorted alphabetically —
 * used to populate filter dropdowns. Empty lists when the DB is unavailable.
 */
export async function getAapDistinctValues() {
  const db = await getDb();
  if (!db) return { regions: [], departements: [] };
  const [regions, departements] = await Promise.all([
    db.selectDistinct({ value: aapItems.region }).from(aapItems).where(sql`${aapItems.region} IS NOT NULL`),
    db.selectDistinct({ value: aapItems.departement }).from(aapItems).where(sql`${aapItems.departement} IS NOT NULL`),
  ]);
  // Drop null/empty values and sort the remainder.
  const clean = (rows: Array<{ value: string | null }>) =>
    rows.flatMap((row) => (row.value ? [row.value] : [])).sort();
  return {
    regions: clean(regions),
    departements: clean(departements),
  };
}
// ─── App Settings ─────────────────────────────────────────────────────────────
/** Read one app setting by key; null when missing or the DB is unavailable. */
export async function getSetting(key: string): Promise<string | null> {
  const db = await getDb();
  if (!db) return null;
  const [row] = await db
    .select()
    .from(appSettings)
    .where(eq(appSettings.key, key))
    .limit(1);
  return row?.value ?? null;
}
/**
 * Load every app setting into a key -> value record, skipping rows whose
 * key is falsy or whose value is null/undefined. Empty record when the DB
 * is unavailable.
 */
export async function getAllSettings(): Promise<Record<string, string>> {
  const db = await getDb();
  if (!db) return {};
  const rows = await db.select().from(appSettings);
  const entries = rows
    .filter((row) => Boolean(row.key) && row.value != null)
    .map((row) => [row.key as string, row.value as string] as const);
  return Object.fromEntries(entries);
}
/** Upsert a single app setting; throws when the DB is unavailable. */
export async function setSetting(key: string, value: string): Promise<void> {
  const db = await getDb();
  if (!db) throw new Error("DB unavailable");
  const record = { key, value };
  await db
    .insert(appSettings)
    .values(record)
    .onDuplicateKeyUpdate({ set: { value } });
}
/**
 * Upsert a batch of app settings.
 *
 * The per-key upserts are independent of one another, so they are issued
 * concurrently with Promise.all instead of awaiting each round-trip in
 * sequence (the previous implementation was O(keys) sequential round-trips).
 *
 * @param settings key -> value pairs to write
 * @throws Error when the database is unavailable, or if any upsert fails.
 */
export async function setSettings(settings: Record<string, string>): Promise<void> {
  const db = await getDb();
  if (!db) throw new Error("DB unavailable");
  await Promise.all(
    Object.entries(settings).map(([key, value]) =>
      db
        .insert(appSettings)
        .values({ key, value })
        .onDuplicateKeyUpdate({ set: { value } })
    )
  );
}
// ─── Import Logs ──────────────────────────────────────────────────────────────
/** Most recent import-log rows, newest first (default cap: 50). */
export async function getImportLogs(limit = 50) {
  const db = await getDb();
  if (!db) return [];
  const recentLogs = db
    .select()
    .from(importLogs)
    .orderBy(desc(importLogs.startedAt))
    .limit(limit);
  return recentLogs;
}
/**
 * Aggregate stats for the import dashboard: per-table row counts, the most
 * recent import log, and success/error/new-row tallies across all logs.
 * Zeroed defaults when the DB is unavailable.
 * NOTE(review): every import_logs row is loaded into memory to compute the
 * tallies; fine while that table stays small, but consider SQL aggregates
 * if it grows.
 */
export async function getImportStats() {
  const db = await getDb();
  if (!db) return { totalVeille: 0, totalAap: 0, lastImport: null, total: 0, success: 0, errors: 0, totalNewRows: 0 };
  const [veilleCount, aapCount, lastLog, allLogs] = await Promise.all([
    db.select({ count: sql<number>`count(*)` }).from(veilleItems),
    db.select({ count: sql<number>`count(*)` }).from(aapItems),
    db.select().from(importLogs).orderBy(desc(importLogs.startedAt)).limit(1),
    db.select().from(importLogs),
  ]);
  const total = allLogs.length;
  const success = allLogs.filter(l => l.status === 'success').length;
  const errors = allLogs.filter(l => l.status === 'error').length;
  const totalNewRows = allLogs.reduce((sum, l) => sum + (l.newRows ?? 0), 0);
  return {
    totalVeille: Number(veilleCount[0]?.count ?? 0),
    totalAap: Number(aapCount[0]?.count ?? 0),
    lastImport: lastLog[0] ?? null,
    total,
    success,
    errors,
    totalNewRows,
  };
}

390
server/importer.ts Normal file
View File

@@ -0,0 +1,390 @@
import * as XLSX from "xlsx";
import * as crypto from "crypto";
import * as fs from "fs";
import * as path from "path";
import * as ftp from "basic-ftp";
import * as https from "https";
import * as http from "http";
import { getDb } from "./db";
import { veilleItems, aapItems, importLogs, appSettings } from "../drizzle/schema";
import { eq, inArray } from "drizzle-orm";
// ─── Types ───────────────────────────────────────────────────────────────────
/** Where the Excel files are fetched from. */
export type SourceType = "local" | "onedrive" | "ftp" | "sharepoint";
/** Import connection settings; only the fields matching sourceType are used. */
export interface ImportConfig {
  sourceType: SourceType;
  veilleFilePath?: string;
  aapFilePath?: string;
  ftpHost?: string;
  ftpPort?: number; // defaults to 21 when unset
  ftpUser?: string;
  ftpPassword?: string;
  ftpSecure?: boolean;
  onedriveToken?: string;
  sharepointSiteUrl?: string;
  sharepointToken?: string;
}
/** Per-file outcome of an import run. */
export interface ImportResult {
  fileType: "veille" | "aap";
  totalRows: number; // rows seen in the workbook
  newRows: number; // rows inserted
  skippedRows: number; // rows without a title, or already present (dedup)
  errors: string[];
  status: "success" | "partial" | "error";
}
// ─── Utilitaires ─────────────────────────────────────────────────────────────
/**
 * Stable dedup key for an imported row: md5 hex digest over the trimmed,
 * lower-cased title and link joined by "|". A missing link hashes as the
 * empty string, so (titre) and (titre, null) produce the same key.
 */
function makeDedupKey(titre: string, lien?: string | null): string {
  const titlePart = (titre || "").trim().toLowerCase();
  const linkPart = (lien || "").trim().toLowerCase();
  return crypto.createHash("md5").update(`${titlePart}|${linkPart}`).digest("hex");
}
/**
 * Best-effort parsing of an Excel cell value into a Date.
 * Accepts Date objects, date strings, and Excel serial numbers; returns
 * null for anything falsy or unparseable.
 * NOTE(review): the string branch removes the FIRST "Z" found anywhere in
 * the value, which makes an ISO UTC timestamp parse as LOCAL time (and could
 * mangle non-ISO strings containing a Z) — confirm this timezone handling
 * is intentional.
 */
function parseDate(value: unknown): Date | null {
  if (!value) return null;
  if (value instanceof Date) return isNaN(value.getTime()) ? null : value;
  if (typeof value === "string") {
    const cleaned = value.replace("Z", "").trim();
    const d = new Date(cleaned);
    return isNaN(d.getTime()) ? null : d;
  }
  if (typeof value === "number") {
    // Excel serial date
    const d = XLSX.SSF.parse_date_code(value);
    // Date-only precision: time-of-day components are dropped.
    if (d) return new Date(d.y, d.m - 1, d.d);
  }
  return null;
}
/**
 * Normalize an Excel cell value to a trimmed string. Null/undefined, empty
 * (after trimming), and the placeholder "Non renseigné" all become null.
 */
function normalizeStr(v: unknown): string | null {
  if (v == null) return null;
  const text = String(v).trim();
  if (text.length === 0) return null;
  return text === "Non renseigné" ? null : text;
}
// ─── Téléchargement des fichiers selon la source ─────────────────────────────
/**
 * Fetch an import file as a Buffer from the configured source.
 *
 * @param filePath Local path, FTP remote path, or full URL depending on
 *                 `config.sourceType`.
 * @param config   Import configuration (host/credentials/tokens per source).
 * @throws Error when the file is missing, credentials are absent, the source
 *         type is unknown, or the remote endpoint answers with an HTTP error.
 */
async function downloadFile(
  filePath: string,
  config: ImportConfig
): Promise<Buffer> {
  switch (config.sourceType) {
    case "local": {
      if (!fs.existsSync(filePath)) {
        throw new Error(`Fichier introuvable : ${filePath}`);
      }
      return fs.readFileSync(filePath);
    }
    case "ftp": {
      const client = new ftp.Client();
      client.ftp.verbose = false;
      try {
        await client.access({
          host: config.ftpHost!,
          port: config.ftpPort || 21,
          user: config.ftpUser!,
          password: config.ftpPassword!,
          secure: config.ftpSecure || false,
        });
        // basic-ftp writes to a path, so stage through a temp file.
        const tmpPath = `/tmp/veille_import_${Date.now()}.xlsx`;
        await client.downloadTo(tmpPath, filePath);
        try {
          return fs.readFileSync(tmpPath);
        } finally {
          // Fix: cleanup now runs even if reading the temp file throws.
          fs.unlinkSync(tmpPath);
        }
      } finally {
        client.close();
      }
    }
    case "onedrive":
    case "sharepoint": {
      const token =
        config.sourceType === "onedrive"
          ? config.onedriveToken
          : config.sharepointToken;
      if (!token) throw new Error("Token d'authentification manquant");
      return new Promise((resolve, reject) => {
        const url = new URL(filePath);
        const options = {
          hostname: url.hostname,
          path: url.pathname + url.search,
          headers: { Authorization: `Bearer ${token}` },
        };
        const protocol = url.protocol === "https:" ? https : http;
        protocol
          .get(options, (res) => {
            // Bug fix: a non-2xx answer (expired token, bad link, 404) used
            // to be buffered and returned as if it were the file's bytes;
            // reject explicitly instead.
            if (!res.statusCode || res.statusCode < 200 || res.statusCode >= 300) {
              res.resume(); // drain the response so the socket is released
              reject(new Error(`Téléchargement échoué (HTTP ${res.statusCode})`));
              return;
            }
            const chunks: Buffer[] = [];
            res.on("data", (c) => chunks.push(c));
            res.on("end", () => resolve(Buffer.concat(chunks)));
            res.on("error", reject);
          })
          .on("error", reject);
      });
    }
    default:
      throw new Error(`Source non supportée : ${config.sourceType}`);
  }
}
// ─── Lecture des paramètres depuis la BDD ────────────────────────────────────
/**
 * Assemble the importer configuration from the app_settings table,
 * falling back to a bare local-file source when the DB is unavailable.
 * NOTE(review): only truthy setting values are kept here (empty strings are
 * dropped), unlike getAllSettings in db.ts which keeps empty strings —
 * confirm the asymmetry is intentional.
 */
export async function getImportConfig(): Promise<ImportConfig> {
  const db = await getDb();
  if (!db) return { sourceType: "local" };
  const rows = await db.select().from(appSettings);
  const map: Record<string, string> = {};
  for (const r of rows) {
    if (r.key && r.value) map[r.key] = r.value;
  }
  return {
    sourceType: (map["source_type"] as SourceType) || "local",
    veilleFilePath: map["veille_file_path"] || "",
    aapFilePath: map["aap_file_path"] || "",
    ftpHost: map["ftp_host"],
    ftpPort: map["ftp_port"] ? parseInt(map["ftp_port"]) : 21,
    ftpUser: map["ftp_user"],
    ftpPassword: map["ftp_password"],
    ftpSecure: map["ftp_secure"] === "true",
    onedriveToken: map["onedrive_token"],
    sharepointSiteUrl: map["sharepoint_site_url"],
    sharepointToken: map["sharepoint_token"],
  };
}
// ─── Import Veille Stratégique ───────────────────────────────────────────────
// Sheet-name → typeVeille mapping. Keys are lowercased sheet names,
// listed both with and without accents so either spelling is recognized.
const VEILLE_SHEETS: Record<string, "reglementaire" | "concurrentielle" | "technologique" | "generale"> = {
  réglementaire: "reglementaire",
  reglementaire: "reglementaire",
  concurrentielle: "concurrentielle",
  technologique: "technologique",
  générale: "generale",
  generale: "generale",
};
/**
 * Import the "Veille Stratégique" Excel workbook into veille_items.
 *
 * Downloads the configured workbook, walks every recognized sheet (see
 * VEILLE_SHEETS), and inserts rows that are not already present (dedup on
 * an MD5 of titre+lien). Rows without a title are skipped.
 *
 * @param config source/location settings from getImportConfig()
 * @returns per-file counters, collected error messages and an overall status
 * @throws when the database is unavailable or no file path is configured
 */
export async function importVeille(config: ImportConfig): Promise<ImportResult> {
  const startedAt = new Date();
  const errors: string[] = [];
  let totalRows = 0;
  let newRows = 0;
  let skippedRows = 0;
  const db = await getDb();
  if (!db) throw new Error("Base de données indisponible");
  const filePath = config.veilleFilePath;
  if (!filePath) throw new Error("Chemin du fichier Veille non configuré");
  let buffer: Buffer;
  try {
    buffer = await downloadFile(filePath, config);
  } catch (e: unknown) {
    // Download failures are logged and returned as a result, not thrown.
    const msg = e instanceof Error ? e.message : String(e);
    await logImport(db, "veille", filePath, "error", 0, 0, 0, msg, null, startedAt);
    return { fileType: "veille", totalRows: 0, newRows: 0, skippedRows: 0, errors: [msg], status: "error" };
  }
  const workbook = XLSX.read(buffer, { type: "buffer", cellDates: true });
  for (const sheetName of workbook.SheetNames) {
    const normalized = sheetName.toLowerCase().trim();
    // "poubelle" (trash) sheets and unrecognized sheet names are ignored.
    if (normalized === "poubelle") continue;
    const typeVeille = VEILLE_SHEETS[normalized];
    if (!typeVeille) continue;
    const sheet = workbook.Sheets[sheetName];
    const rows = XLSX.utils.sheet_to_json<Record<string, unknown>>(sheet, { defval: null });
    for (const row of rows) {
      totalRows++;
      const titre = normalizeStr(row["Titre"]);
      if (!titre) { skippedRows++; continue; }
      const lien = normalizeStr(row["Lien"]);
      const dedupKey = makeDedupKey(titre, lien);
      // Skip rows already imported (dedup by titre+lien hash).
      const existing = await db
        .select({ id: veilleItems.id })
        .from(veilleItems)
        .where(eq(veilleItems.dedupKey, dedupKey))
        .limit(1);
      if (existing.length > 0) { skippedRows++; continue; }
      // Extract the date from the "Source" column (which holds an ISO date).
      const sourceRaw = row["Source"];
      const datePublication = parseDate(sourceRaw);
      // The real source (URL) appears to live in "Lien" for some sheets;
      // only keep "Source" as text when it is not a date.
      const sourceStr = normalizeStr(sourceRaw instanceof Date ? null : sourceRaw);
      try {
        await db.insert(veilleItems).values({
          dedupKey,
          titre,
          categorie: normalizeStr(row["Catégorie"]),
          niveau: normalizeStr(row["Niveau"]),
          territoire: normalizeStr(row["Territoire"]),
          // note the leading-space header variant " Résumé" seen in real files
          resume: normalizeStr(row[" Résumé"] ?? row["Résumé"] ?? row["Resume"]),
          source: sourceStr,
          passage: normalizeStr(row["passage"] ?? row["Passage"]),
          lien,
          typeVeille,
          datePublication,
        });
        newRows++;
      } catch (e: unknown) {
        const msg = e instanceof Error ? e.message : String(e);
        errors.push(`[${sheetName}] ${titre?.substring(0, 50)}: ${msg}`);
        skippedRows++;
      }
    }
  }
  // partial = some rows landed despite errors; error = nothing inserted.
  const status = errors.length === 0 ? "success" : newRows > 0 ? "partial" : "error";
  await logImport(db, "veille", filePath, status, totalRows, newRows, skippedRows, errors.join("\n") || null, { errors }, startedAt);
  return { fileType: "veille", totalRows, newRows, skippedRows, errors, status };
}
// ─── Import Appels à Projets ─────────────────────────────────────────────────
// Sheet-name → AAP category mapping. Keys are lowercased sheet names,
// with the unaccented variant of "précarité" included as a fallback.
const AAP_SHEETS: Record<string, "Handicap" | "PA" | "Enfance" | "Précarité" | "Sanitaire" | "Autre"> = {
  handicap: "Handicap",
  pa: "PA",
  enfance: "Enfance",
  "précarité": "Précarité",
  precarite: "Précarité",
  sanitaire: "Sanitaire",
  autre: "Autre",
};
/**
 * Import the "Appels à Projets" Excel workbook into aap_items.
 *
 * Mirrors importVeille: download, walk recognized sheets (AAP_SHEETS),
 * dedup on MD5 of titre+lien, skip rows without a title.
 *
 * @param config source/location settings from getImportConfig()
 * @returns per-file counters, collected error messages and an overall status
 * @throws when the database is unavailable or no file path is configured
 */
export async function importAAP(config: ImportConfig): Promise<ImportResult> {
  const startedAt = new Date();
  const errors: string[] = [];
  let totalRows = 0;
  let newRows = 0;
  let skippedRows = 0;
  const db = await getDb();
  if (!db) throw new Error("Base de données indisponible");
  const filePath = config.aapFilePath;
  if (!filePath) throw new Error("Chemin du fichier AAP non configuré");
  let buffer: Buffer;
  try {
    buffer = await downloadFile(filePath, config);
  } catch (e: unknown) {
    // Download failures are logged and returned as a result, not thrown.
    const msg = e instanceof Error ? e.message : String(e);
    await logImport(db, "aap", filePath, "error", 0, 0, 0, msg, null, startedAt);
    return { fileType: "aap", totalRows: 0, newRows: 0, skippedRows: 0, errors: [msg], status: "error" };
  }
  const workbook = XLSX.read(buffer, { type: "buffer", cellDates: true });
  for (const sheetName of workbook.SheetNames) {
    // Two lookups: exact lowercased name first, then accent-stripped (é/è → e).
    const normalized = sheetName.toLowerCase().trim().replace(/é/g, "e").replace(/è/g, "e");
    const categorie = AAP_SHEETS[sheetName.toLowerCase().trim()] || AAP_SHEETS[normalized];
    if (!categorie) continue;
    const sheet = workbook.Sheets[sheetName];
    const rows = XLSX.utils.sheet_to_json<Record<string, unknown>>(sheet, { defval: null });
    for (const row of rows) {
      totalRows++;
      const titre = normalizeStr(row["Titre"]);
      if (!titre) { skippedRows++; continue; }
      const lien = normalizeStr(row["Lien"]);
      const dedupKey = makeDedupKey(titre, lien);
      // Skip rows already imported (dedup by titre+lien hash).
      const existing = await db
        .select({ id: aapItems.id })
        .from(aapItems)
        .where(eq(aapItems.dedupKey, dedupKey))
        .limit(1);
      if (existing.length > 0) { skippedRows++; continue; }
      const datePublication = parseDate(row["Date publication"]);
      const dateCloture = parseDate(row["Date clôture"]);
      try {
        await db.insert(aapItems).values({
          dedupKey,
          titre,
          categorie,
          region: normalizeStr(row["Région"]),
          departement: normalizeStr(row["Département"]),
          dateCloture,
          datePublication,
          lien,
        });
        newRows++;
      } catch (e: unknown) {
        const msg = e instanceof Error ? e.message : String(e);
        errors.push(`[${sheetName}] ${titre?.substring(0, 50)}: ${msg}`);
        skippedRows++;
      }
    }
  }
  // partial = some rows landed despite errors; error = nothing inserted.
  const status = errors.length === 0 ? "success" : newRows > 0 ? "partial" : "error";
  await logImport(db, "aap", filePath, status, totalRows, newRows, skippedRows, errors.join("\n") || null, { errors }, startedAt);
  return { fileType: "aap", totalRows, newRows, skippedRows, errors, status };
}
// ─── Import complet (veille + AAP) ───────────────────────────────────────────
/**
 * Run both imports (veille + AAP) in parallel against the stored config.
 */
export async function runFullImport(): Promise<{ veille: ImportResult; aap: ImportResult }> {
  const config = await getImportConfig();
  const veillePromise = importVeille(config);
  const aapPromise = importAAP(config);
  const [veille, aap] = await Promise.all([veillePromise, aapPromise]);
  return { veille, aap };
}
// ─── Enregistrement des logs ─────────────────────────────────────────────────
/**
 * Persist one row in import_logs describing a finished import run.
 * Logging failures are swallowed (console only) so they never mask
 * the import result itself.
 */
async function logImport(
  db: Awaited<ReturnType<typeof getDb>>,
  fileType: "veille" | "aap",
  source: string,
  status: "success" | "partial" | "error",
  totalRows: number,
  newRows: number,
  skippedRows: number,
  errorMessage: string | null,
  details: unknown,
  startedAt: Date
) {
  if (!db) return;
  try {
    await db.insert(importLogs).values({
      fileType,
      source,
      status,
      totalRows,
      newRows,
      skippedRows,
      errorMessage,
      details: details as Record<string, unknown> | null,
      startedAt,
      completedAt: new Date(),
    });
  } catch (e) {
    // Best-effort: never let log persistence break the import flow.
    console.error("[Import] Erreur lors de l'enregistrement du log:", e);
  }
}

103
server/localAuth.ts Normal file
View File

@@ -0,0 +1,103 @@
import bcrypt from "bcryptjs";
import { getDb } from "./db";
import { localUsers } from "../drizzle/schema";
import { eq, or } from "drizzle-orm";
import { SignJWT, jwtVerify } from "jose";
import { ENV } from "./_core/env";
// bcrypt work factor for password hashing.
const SALT_ROUNDS = 12;
// Lifetime of issued local-auth JWTs.
const JWT_EXPIRY = "7d";
// Cookie name for the local-auth JWT.
// NOTE(review): routers.ts hardcodes this same string literal instead of
// importing this constant — confirm and deduplicate.
const LOCAL_AUTH_COOKIE = "veille_local_auth";
/** Hash a plaintext password with bcrypt (cost = SALT_ROUNDS). */
export async function hashPassword(password: string): Promise<string> {
  return bcrypt.hash(password, SALT_ROUNDS);
}
/** Constant-time-ish comparison of a plaintext password against a bcrypt hash. */
export async function verifyPassword(password: string, hash: string): Promise<boolean> {
  return bcrypt.compare(password, hash);
}
/**
 * Sign an HS256 JWT identifying a local user.
 * Payload: sub = user id, role, type = "local" (checked by verifyLocalToken).
 */
export async function generateLocalToken(userId: number, role: string): Promise<string> {
  const secret = new TextEncoder().encode(ENV.cookieSecret);
  return new SignJWT({ sub: String(userId), role, type: "local" })
    .setProtectedHeader({ alg: "HS256" })
    .setIssuedAt()
    .setExpirationTime(JWT_EXPIRY)
    .sign(secret);
}
/**
 * Verify a local-auth JWT and extract its payload.
 * Returns null for any token that is invalid, expired, not of type
 * "local", or whose payload is malformed — never throws.
 */
export async function verifyLocalToken(token: string): Promise<{ userId: number; role: string } | null> {
  try {
    const secret = new TextEncoder().encode(ENV.cookieSecret);
    const { payload } = await jwtVerify(token, secret);
    if (payload.type !== "local" || !payload.sub) return null;
    const userId = parseInt(payload.sub, 10);
    // Reject malformed subjects instead of propagating a NaN user id.
    if (!Number.isFinite(userId)) return null;
    // Tokens are only issued with a string role; treat anything else as invalid.
    if (typeof payload.role !== "string") return null;
    return { userId, role: payload.role };
  } catch {
    return null;
  }
}
/**
 * Authenticate a local user by email (or exact identifier) + password.
 * A single generic error message covers "unknown user", "inactive account"
 * and "wrong password" so responses don't leak which check failed.
 *
 * @returns a signed JWT plus a safe subset of the user record
 * @throws on bad credentials, inactive account, or unavailable database
 */
export async function loginLocalUser(email: string, password: string) {
  const db = await getDb();
  if (!db) throw new Error("Base de données indisponible");
  // Look up by e-mail (case-insensitive) OR by the exact identifier.
  const identifier = email.trim();
  const users = await db
    .select()
    .from(localUsers)
    .where(
      or(
        eq(localUsers.email, identifier.toLowerCase()),
        eq(localUsers.email, identifier)
      )
    )
    .limit(1);
  const user = users[0];
  if (!user || !user.isActive) {
    throw new Error("Identifiants incorrects ou compte désactivé");
  }
  const valid = await verifyPassword(password, user.passwordHash);
  if (!valid) throw new Error("Identifiants incorrects ou compte désactivé");
  // Record the successful sign-in timestamp.
  await db
    .update(localUsers)
    .set({ lastSignedIn: new Date() })
    .where(eq(localUsers.id, user.id));
  const token = await generateLocalToken(user.id, user.role);
  return { token, user: { id: user.id, name: user.name, email: user.email, role: user.role } };
}
/** Fetch a local user row by primary key; null when missing or DB is down. */
export async function getLocalUserById(id: number) {
  const db = await getDb();
  if (!db) return null;
  const users = await db.select().from(localUsers).where(eq(localUsers.id, id)).limit(1);
  return users[0] ?? null;
}
/**
 * First-run bootstrap: create a default admin account when no admin exists.
 * No-op when the database is unavailable or an admin is already present.
 *
 * NOTE(review): the default password is hardcoded in source and printed to
 * the server log. Acceptable only for initial setup — confirm the password
 * is rotated immediately in production deployments.
 */
export async function ensureAdminExists() {
  const db = await getDb();
  if (!db) return;
  const admins = await db
    .select({ id: localUsers.id })
    .from(localUsers)
    .where(eq(localUsers.role, "admin"))
    .limit(1);
  if (admins.length === 0) {
    const hash = await hashPassword("Admin@Itinova2024!");
    await db.insert(localUsers).values({
      name: "Administrateur",
      email: "admin@itinova.fr",
      passwordHash: hash,
      role: "admin",
      isActive: true,
    });
    console.log("[LocalAuth] Compte admin par défaut créé : admin@itinova.fr / Admin@Itinova2024!");
  }
}

243
server/routers.ts Normal file
View File

@@ -0,0 +1,243 @@
import { z } from "zod";
import { TRPCError } from "@trpc/server";
import { COOKIE_NAME } from "@shared/const";
import { getSessionCookieOptions } from "./_core/cookies";
import { systemRouter } from "./_core/systemRouter";
import { publicProcedure, protectedProcedure, router } from "./_core/trpc";
import {
getVeilleItems,
getVeilleDistinctValues,
getAapItems,
getAapDistinctValues,
getAllSettings,
setSettings,
getImportLogs,
getImportStats,
getLocalUsers,
createLocalUser,
updateLocalUser,
deleteLocalUser,
} from "./db";
import { importVeille, importAAP, runFullImport, getImportConfig } from "./importer";
import { loginLocalUser, hashPassword, ensureAdminExists } from "./localAuth";
// ─── Admin middleware: protectedProcedure (user != null) + role check ─────────
const adminProcedure = protectedProcedure.use(({ ctx, next }) => {
  if (ctx.user.role !== "admin") {
    throw new TRPCError({ code: "FORBIDDEN", message: "Accès réservé aux administrateurs" });
  }
  return next({ ctx });
});
// ─── Router principal ─────────────────────────────────────────────────────────
export const appRouter = router({
  system: systemRouter,
  // ─── Auth ───────────────────────────────────────────────────────────────────
  auth: router({
    // Current user (or null when anonymous) — public on purpose.
    me: publicProcedure.query((opts) => opts.ctx.user),
    logout: publicProcedure.mutation(({ ctx }) => {
      const cookieOptions = getSessionCookieOptions(ctx.req);
      // maxAge: -1 forces immediate expiry of the session cookie.
      ctx.res.clearCookie(COOKIE_NAME, { ...cookieOptions, maxAge: -1 });
      return { success: true } as const;
    }),
    // Local (email/password) login
    localLogin: publicProcedure
      .input(z.object({ email: z.string().min(1), password: z.string().min(1) }))
      .mutation(async ({ input, ctx }) => {
        const result = await loginLocalUser(input.email, input.password);
        // Store the JWT in a cookie (7-day lifetime, matching the JWT expiry).
        const cookieOptions = getSessionCookieOptions(ctx.req);
        ctx.res.cookie("veille_local_auth", result.token, {
          ...cookieOptions,
          maxAge: 7 * 24 * 60 * 60 * 1000,
        });
        return { success: true, user: result.user };
      }),
    localLogout: publicProcedure.mutation(({ ctx }) => {
      const cookieOptions = getSessionCookieOptions(ctx.req);
      ctx.res.clearCookie("veille_local_auth", { ...cookieOptions, maxAge: -1 });
      return { success: true };
    }),
  }),
  // ─── Veille (public read access) ────────────────────────────────────────────
  veille: router({
    list: publicProcedure
      .input(
        z.object({
          typeVeille: z.enum(["reglementaire", "concurrentielle", "technologique", "generale"]).optional(),
          categorie: z.string().optional(),
          niveau: z.string().optional(),
          territoire: z.string().optional(),
          search: z.string().optional(),
          dateFrom: z.date().optional(),
          dateTo: z.date().optional(),
          page: z.number().int().positive().default(1),
          pageSize: z.number().int().positive().max(200).default(50),
        })
      )
      .query(async ({ input }) => {
        return getVeilleItems(input);
      }),
    filters: publicProcedure.query(async () => {
      return getVeilleDistinctValues();
    }),
  }),
  // ─── AAP (public read access) ───────────────────────────────────────────────
  aap: router({
    list: publicProcedure
      .input(
        z.object({
          categorie: z.enum(["Handicap", "PA", "Enfance", "Précarité", "Sanitaire", "Autre"]).optional(),
          region: z.string().optional(),
          departement: z.string().optional(),
          search: z.string().optional(),
          dateFrom: z.date().optional(),
          dateTo: z.date().optional(),
          clotureFrom: z.date().optional(),
          clotureTo: z.date().optional(),
          page: z.number().int().positive().default(1),
          pageSize: z.number().int().positive().max(200).default(50),
        })
      )
      .query(async ({ input }) => {
        return getAapItems(input);
      }),
    filters: publicProcedure.query(async () => {
      return getAapDistinctValues();
    }),
  }),
  // ─── Import (admin-only triggers and logs) ──────────────────────────────────
  import: router({
    run: adminProcedure
      .input(z.object({ type: z.enum(["veille", "aap", "all"]).default("all") }))
      .mutation(async ({ input }) => {
        const config = await getImportConfig();
        if (input.type === "all") return runFullImport();
        if (input.type === "veille") return { veille: await importVeille(config) };
        return { aap: await importAAP(config) };
      }),
    logs: adminProcedure
      .input(z.object({ page: z.number().int().positive().default(1), pageSize: z.number().int().positive().max(100).default(20) }))
      .query(async ({ input }) => {
        // Fetches up to 500 log rows, then paginates in memory.
        const allLogs = await getImportLogs(500);
        const start = (input.page - 1) * input.pageSize;
        const logs = allLogs.slice(start, start + input.pageSize);
        const stats = await getImportStats();
        return { logs, total: allLogs.length, stats };
      }),
    stats: publicProcedure.query(async () => {
      return getImportStats();
    }),
  }),
  // ─── Settings ───────────────────────────────────────────────────────────────
  settings: router({
    get: adminProcedure.query(async () => {
      const all = await getAllSettings();
      // Mask stored secrets before returning them to the client.
      const safe = { ...all };
      if (safe.ftp_password) safe.ftp_password = "••••••••";
      if (safe.onedrive_token) safe.onedrive_token = "••••••••";
      if (safe.sharepoint_token) safe.sharepoint_token = "••••••••";
      return safe;
    }),
    save: adminProcedure
      .input(
        z.object({
          source_type: z.enum(["local", "onedrive", "ftp", "sharepoint"]),
          veille_file_path: z.string().optional(),
          aap_file_path: z.string().optional(),
          ftp_host: z.string().optional(),
          ftp_port: z.string().optional(),
          ftp_user: z.string().optional(),
          ftp_password: z.string().optional(),
          ftp_secure: z.string().optional(),
          onedrive_token: z.string().optional(),
          sharepoint_site_url: z.string().optional(),
          sharepoint_token: z.string().optional(),
          auth_mode: z.enum(["local", "free"]).optional(),
          import_time: z.string().optional(),
        })
      )
      .mutation(async ({ input }) => {
        const toSave: Record<string, string> = {};
        // Skip the "••••••••" placeholder so masked secrets are not
        // written back over the real stored values.
        for (const [k, v] of Object.entries(input)) {
          if (v !== undefined && v !== "••••••••") toSave[k] = v;
        }
        await setSettings(toSave);
        return { success: true };
      }),
  }),
  // ─── Local users (admin-managed accounts) ───────────────────────────────────
  users: router({
    list: adminProcedure.query(async () => {
      return getLocalUsers();
    }),
    create: adminProcedure
      .input(
        z.object({
          name: z.string().min(2).max(255),
          email: z.string().email(),
          password: z.string().min(8),
          role: z.enum(["admin", "user", "readonly"]).default("user"),
        })
      )
      .mutation(async ({ input }) => {
        const passwordHash = await hashPassword(input.password);
        await createLocalUser({
          name: input.name,
          email: input.email.toLowerCase(),
          passwordHash,
          role: input.role,
          isActive: true,
        });
        return { success: true };
      }),
    update: adminProcedure
      .input(
        z.object({
          id: z.number().int().positive(),
          name: z.string().min(2).max(255).optional(),
          email: z.string().email().optional(),
          password: z.string().min(8).optional(),
          role: z.enum(["admin", "user", "readonly"]).optional(),
          isActive: z.boolean().optional(),
        })
      )
      .mutation(async ({ input }) => {
        const { id, password, ...rest } = input;
        const data: Record<string, unknown> = { ...rest };
        // A new password is re-hashed; other fields pass through as-is.
        if (password) data.passwordHash = await hashPassword(password);
        await updateLocalUser(id, data as Parameters<typeof updateLocalUser>[1]);
        return { success: true };
      }),
    delete: adminProcedure
      .input(z.object({ id: z.number().int().positive() }))
      .mutation(async ({ input }) => {
        await deleteLocalUser(input.id);
        return { success: true };
      }),
    // Public on purpose: seeding the default admin must work before
    // anyone can authenticate.
    ensureAdmin: publicProcedure.mutation(async () => {
      await ensureAdminExists();
      return { success: true };
    }),
  }),
});
export type AppRouter = typeof appRouter;

102
server/storage.ts Normal file
View File

@@ -0,0 +1,102 @@
// Preconfigured storage helpers for Manus WebDev templates
// Uses the Biz-provided storage proxy (Authorization: Bearer <token>)
import { ENV } from './_core/env';
type StorageConfig = { baseUrl: string; apiKey: string };
// Resolve the storage proxy endpoint and credentials from the environment.
// Throws when either half of the configuration is missing.
function getStorageConfig(): StorageConfig {
  const { forgeApiUrl, forgeApiKey } = ENV;
  if (!forgeApiUrl || !forgeApiKey) {
    throw new Error(
      "Storage proxy credentials missing: set BUILT_IN_FORGE_API_URL and BUILT_IN_FORGE_API_KEY"
    );
  }
  // Trailing slashes are stripped so later URL building is predictable.
  return { baseUrl: forgeApiUrl.replace(/\/+$/, ""), apiKey: forgeApiKey };
}
// Compose the upload endpoint URL, carrying the object key as a query param.
function buildUploadUrl(baseUrl: string, relKey: string): URL {
  const endpoint = new URL("v1/storage/upload", ensureTrailingSlash(baseUrl));
  endpoint.searchParams.set("path", normalizeKey(relKey));
  return endpoint;
}
/**
 * Ask the storage proxy for a pre-signed download URL for `relKey`.
 * @throws when the proxy responds with a non-2xx status.
 */
async function buildDownloadUrl(
  baseUrl: string,
  relKey: string,
  apiKey: string
): Promise<string> {
  const downloadApiUrl = new URL(
    "v1/storage/downloadUrl",
    ensureTrailingSlash(baseUrl)
  );
  downloadApiUrl.searchParams.set("path", normalizeKey(relKey));
  const response = await fetch(downloadApiUrl, {
    method: "GET",
    headers: buildAuthHeaders(apiKey),
  });
  // Mirror storagePut: surface proxy failures instead of reading `.url`
  // from an error payload (which would silently yield `undefined`).
  if (!response.ok) {
    const message = await response.text().catch(() => response.statusText);
    throw new Error(
      `Storage download URL failed (${response.status} ${response.statusText}): ${message}`
    );
  }
  return (await response.json()).url;
}
// Guarantee exactly one trailing "/" so the value works as a URL base.
function ensureTrailingSlash(value: string): string {
  if (value.endsWith("/")) return value;
  return `${value}/`;
}
// Storage keys are relative: drop any leading slashes.
function normalizeKey(relKey: string): string {
  let start = 0;
  while (start < relKey.length && relKey.charAt(start) === "/") start++;
  return relKey.slice(start);
}
function toFormData(
data: Buffer | Uint8Array | string,
contentType: string,
fileName: string
): FormData {
const blob =
typeof data === "string"
? new Blob([data], { type: contentType })
: new Blob([data as any], { type: contentType });
const form = new FormData();
form.append("file", blob, fileName || "file");
return form;
}
// Bearer-token header attached to every storage proxy call.
function buildAuthHeaders(apiKey: string): HeadersInit {
  const headers: HeadersInit = { Authorization: `Bearer ${apiKey}` };
  return headers;
}
/**
 * Upload a payload to the storage proxy under `relKey`.
 * @param relKey object key, relative (leading slashes are stripped)
 * @param data file contents (Buffer, Uint8Array or string)
 * @param contentType MIME type recorded for the blob
 * @returns the normalized key plus the URL reported by the proxy
 * @throws when the proxy responds with a non-2xx status
 */
export async function storagePut(
  relKey: string,
  data: Buffer | Uint8Array | string,
  contentType = "application/octet-stream"
): Promise<{ key: string; url: string }> {
  const { baseUrl, apiKey } = getStorageConfig();
  const key = normalizeKey(relKey);
  const uploadUrl = buildUploadUrl(baseUrl, key);
  // Multipart filename = last path segment of the key.
  const formData = toFormData(data, contentType, key.split("/").pop() ?? key);
  const response = await fetch(uploadUrl, {
    method: "POST",
    headers: buildAuthHeaders(apiKey),
    body: formData,
  });
  if (!response.ok) {
    const message = await response.text().catch(() => response.statusText);
    throw new Error(
      `Storage upload failed (${response.status} ${response.statusText}): ${message}`
    );
  }
  const url = (await response.json()).url;
  return { key, url };
}
/**
 * Resolve a pre-signed download URL for a stored object.
 */
export async function storageGet(relKey: string): Promise<{ key: string; url: string; }> {
  const { baseUrl, apiKey } = getStorageConfig();
  const key = normalizeKey(relKey);
  const url = await buildDownloadUrl(baseUrl, key, apiKey);
  return { key, url };
}

293
server/uploadRoutes.ts Normal file
View File

@@ -0,0 +1,293 @@
import express, { Router } from "express";
import multer from "multer";
import * as XLSX from "xlsx";
import { getDb } from "./db";
import { veilleItems, aapItems, importLogs, appSettings } from "../drizzle/schema";
import { eq } from "drizzle-orm";
import * as crypto from "crypto";
const router: Router = express.Router();
// Multer kept in memory — the buffer is processed directly (no temp files).
const upload = multer({
  storage: multer.memoryStorage(),
  limits: { fileSize: 50 * 1024 * 1024 }, // 50 MB max
  // Accept Excel files by MIME type, with a file-extension fallback for
  // clients that send a generic content type.
  fileFilter: (_req, file, cb) => {
    if (
      file.mimetype === "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet" ||
      file.mimetype === "application/vnd.ms-excel" ||
      file.originalname.endsWith(".xlsx") ||
      file.originalname.endsWith(".xls")
    ) {
      cb(null, true);
    } else {
      cb(new Error("Seuls les fichiers Excel (.xlsx, .xls) sont acceptés"));
    }
  },
});
// ─── Utilitaires ─────────────────────────────────────────────────────────────
// Deduplication key: MD5 over the lowercased, trimmed "titre|lien" pair.
// (Non-cryptographic use — collisions only cause a skipped import row.)
function makeDedupKey(titre: string, lien?: string | null): string {
  const canon = (s: string | null | undefined) => (s ?? "").trim().toLowerCase();
  return crypto
    .createHash("md5")
    .update(`${canon(titre)}|${canon(lien)}`)
    .digest("hex");
}
// Best-effort conversion of a spreadsheet cell value to a Date.
// Returns null when the value is absent or unparseable.
function parseDate(value: unknown): Date | null {
  if (!value) return null;
  // Already a Date (XLSX cellDates mode) — keep it unless invalid.
  if (value instanceof Date) {
    return Number.isNaN(value.getTime()) ? null : value;
  }
  if (typeof value === "string") {
    // Dropping the first "Z" makes ISO timestamps parse as local time
    // (presumably intentional to preserve wall-clock dates — TODO confirm).
    const parsed = new Date(value.replace("Z", "").trim());
    return Number.isNaN(parsed.getTime()) ? null : parsed;
  }
  if (typeof value === "number") {
    // Excel serial date number.
    const parts = XLSX.SSF.parse_date_code(value);
    if (parts) return new Date(parts.y, parts.m - 1, parts.d);
  }
  return null;
}
// Convert an HTML fragment to plain text: drop tags, decode the few
// entities we expect, and collapse runs of whitespace.
function stripHtml(html: string): string {
  return html
    .replace(/<[^>]+>/g, ' ')
    .replace(/&nbsp;/g, ' ')
    .replace(/&lt;/g, '<')
    .replace(/&gt;/g, '>')
    .replace(/&quot;/g, '"')
    .replace(/&#39;/g, "'")
    // &amp; must be decoded LAST: decoding it first would turn
    // "&amp;lt;" into "&lt;" and then into "<" (double-unescape bug).
    .replace(/&amp;/g, '&')
    .replace(/\s+/g, ' ')
    .trim();
}
// Coerce an arbitrary cell value to a trimmed string, mapping empty strings
// and the "Non renseigné" placeholder to null.
function normalizeStr(v: unknown): string | null {
  if (v == null) return null;
  const trimmed = String(v).trim();
  if (trimmed === "" || trimmed === "Non renseigné") return null;
  return trimmed;
}
// ─── Sheet-name mappings ──────────────────────────────────────────────────────
// Lowercased sheet name → typeVeille, with and without accents.
const VEILLE_SHEETS: Record<string, "reglementaire" | "concurrentielle" | "technologique" | "generale"> = {
  "réglementaire": "reglementaire",
  "reglementaire": "reglementaire",
  "concurrentielle": "concurrentielle",
  "technologique": "technologique",
  "générale": "generale",
  "generale": "generale",
};
// Lowercased sheet name → AAP category (unaccented fallback included).
const AAP_SHEETS: Record<string, "Handicap" | "PA" | "Enfance" | "Précarité" | "Sanitaire" | "Autre"> = {
  "handicap": "Handicap",
  "pa": "PA",
  "enfance": "Enfance",
  "précarité": "Précarité",
  "precarite": "Précarité",
  "sanitaire": "Sanitaire",
  "autre": "Autre",
};
// ─── Import depuis buffer ─────────────────────────────────────────────────────
/**
 * Import an uploaded "Veille" workbook (already in memory) into veille_items.
 * Same sheet/dedup rules as the scheduled importer, but the résumé column is
 * additionally stripped of HTML. A row is written to import_logs on completion.
 *
 * @param buffer raw .xlsx/.xls contents
 * @param fileName original upload name, recorded as the log source
 */
async function importVeilleFromBuffer(buffer: Buffer, fileName: string) {
  const startedAt = new Date();
  const errors: string[] = [];
  let totalRows = 0, newRows = 0, skippedRows = 0;
  const db = await getDb();
  if (!db) throw new Error("Base de données indisponible");
  const workbook = XLSX.read(buffer, { type: "buffer", cellDates: true });
  for (const sheetName of workbook.SheetNames) {
    const normalized = sheetName.toLowerCase().trim();
    // "poubelle" (trash) sheets and unrecognized names are ignored.
    if (normalized === "poubelle") continue;
    const typeVeille = VEILLE_SHEETS[normalized];
    if (!typeVeille) continue;
    const sheet = workbook.Sheets[sheetName];
    const rows = XLSX.utils.sheet_to_json<Record<string, unknown>>(sheet, { defval: null });
    for (const row of rows) {
      totalRows++;
      const titre = normalizeStr(row["Titre"]);
      if (!titre) { skippedRows++; continue; }
      const lien = normalizeStr(row["Lien"]);
      const dedupKey = makeDedupKey(titre, lien);
      // Skip rows already imported (dedup by titre+lien hash).
      const existing = await db
        .select({ id: veilleItems.id })
        .from(veilleItems)
        .where(eq(veilleItems.dedupKey, dedupKey))
        .limit(1);
      if (existing.length > 0) { skippedRows++; continue; }
      // The "Source" column holds an ISO date in this file.
      const sourceRaw = row["Source"];
      const datePublication = parseDate(sourceRaw);
      const sourceStr = normalizeStr(sourceRaw instanceof Date ? null : sourceRaw);
      try {
        await db.insert(veilleItems).values({
          dedupKey,
          titre,
          categorie: normalizeStr(row["Catégorie"]),
          niveau: normalizeStr(row["Niveau"]),
          territoire: normalizeStr(row["Territoire"]),
          // Strip embedded HTML from the résumé (leading-space header variant too).
          resume: (() => {
            const raw = normalizeStr(row[" Résumé"] ?? row["Résumé"] ?? row["Resume"]);
            return raw ? stripHtml(raw) : null;
          })(),
          source: sourceStr,
          passage: normalizeStr(row["passage"] ?? row["Passage"]),
          lien,
          typeVeille,
          datePublication,
        });
        newRows++;
      } catch (e: unknown) {
        const msg = e instanceof Error ? e.message : String(e);
        errors.push(`[${sheetName}] ${titre.substring(0, 50)}: ${msg}`);
        skippedRows++;
      }
    }
  }
  // partial = some rows landed despite errors; error = nothing inserted.
  const status = errors.length === 0 ? "success" : newRows > 0 ? "partial" : "error";
  await db.insert(importLogs).values({
    fileType: "veille",
    source: fileName,
    status,
    totalRows,
    newRows,
    skippedRows,
    errorMessage: errors.length > 0 ? errors.join("\n") : null,
    details: errors.length > 0 ? { errors } as Record<string, unknown> : null,
    startedAt,
    completedAt: new Date(),
  });
  return { totalRows, newRows, skippedRows, errors, status };
}
/**
 * Import an uploaded "Appels à Projets" workbook (in memory) into aap_items.
 * Mirrors importVeilleFromBuffer: sheet mapping via AAP_SHEETS, dedup on
 * titre+lien hash, import_logs row written on completion.
 *
 * @param buffer raw .xlsx/.xls contents
 * @param fileName original upload name, recorded as the log source
 */
async function importAAPFromBuffer(buffer: Buffer, fileName: string) {
  const startedAt = new Date();
  const errors: string[] = [];
  let totalRows = 0, newRows = 0, skippedRows = 0;
  const db = await getDb();
  if (!db) throw new Error("Base de données indisponible");
  const workbook = XLSX.read(buffer, { type: "buffer", cellDates: true });
  for (const sheetName of workbook.SheetNames) {
    // Exact lowercased lookup first, then accent-stripped (é/è/ê → e).
    const normalized = sheetName.toLowerCase().trim();
    const normalizedAcc = normalized.replace(/é/g, "e").replace(/è/g, "e").replace(/ê/g, "e");
    const categorie = AAP_SHEETS[normalized] || AAP_SHEETS[normalizedAcc];
    if (!categorie) continue;
    const sheet = workbook.Sheets[sheetName];
    const rows = XLSX.utils.sheet_to_json<Record<string, unknown>>(sheet, { defval: null });
    for (const row of rows) {
      totalRows++;
      const titre = normalizeStr(row["Titre"]);
      if (!titre) { skippedRows++; continue; }
      const lien = normalizeStr(row["Lien"]);
      const dedupKey = makeDedupKey(titre, lien);
      // Skip rows already imported (dedup by titre+lien hash).
      const existing = await db
        .select({ id: aapItems.id })
        .from(aapItems)
        .where(eq(aapItems.dedupKey, dedupKey))
        .limit(1);
      if (existing.length > 0) { skippedRows++; continue; }
      const datePublication = parseDate(row["Date publication"]);
      const dateCloture = parseDate(row["Date clôture"]);
      try {
        await db.insert(aapItems).values({
          dedupKey,
          titre,
          categorie,
          region: normalizeStr(row["Région"]),
          departement: normalizeStr(row["Département"]),
          dateCloture,
          datePublication,
          lien,
        });
        newRows++;
      } catch (e: unknown) {
        const msg = e instanceof Error ? e.message : String(e);
        errors.push(`[${sheetName}] ${titre.substring(0, 50)}: ${msg}`);
        skippedRows++;
      }
    }
  }
  // partial = some rows landed despite errors; error = nothing inserted.
  const status = errors.length === 0 ? "success" : newRows > 0 ? "partial" : "error";
  await db.insert(importLogs).values({
    fileType: "aap",
    source: fileName,
    status,
    totalRows,
    newRows,
    skippedRows,
    errorMessage: errors.length > 0 ? errors.join("\n") : null,
    details: errors.length > 0 ? { errors } as Record<string, unknown> : null,
    startedAt,
    completedAt: new Date(),
  });
  return { totalRows, newRows, skippedRows, errors, status };
}
// ─── Route POST /api/upload-excel ─────────────────────────────────────────────
// POST /api/upload-excel — manual Excel upload endpoint.
// Expects multipart form with a "file" part plus a "fileType" field
// selecting the target table ("veille" or "aap").
router.post(
  "/api/upload-excel",
  upload.single("file"),
  async (req: express.Request, res: express.Response): Promise<void> => {
    try {
      const fileType = req.body?.fileType as "veille" | "aap";
      if (!fileType || !["veille", "aap"].includes(fileType)) {
        res.status(400).json({ error: "fileType doit être 'veille' ou 'aap'" });
        return;
      }
      if (!req.file) {
        res.status(400).json({ error: "Aucun fichier reçu" });
        return;
      }
      const buffer = req.file.buffer;
      const fileName = req.file.originalname;
      let result;
      if (fileType === "veille") {
        result = await importVeilleFromBuffer(buffer, fileName);
      } else {
        result = await importAAPFromBuffer(buffer, fileName);
      }
      // Spread the importer counters (totalRows/newRows/skippedRows/errors/status).
      res.json({
        success: true,
        fileType,
        fileName,
        ...result,
      });
    } catch (e: unknown) {
      const msg = e instanceof Error ? e.message : String(e);
      console.error("[Upload] Erreur:", msg);
      res.status(500).json({ error: msg });
    }
  }
);
export default router;

165
server/veille.test.ts Normal file
View File

@@ -0,0 +1,165 @@
import { describe, expect, it, vi, beforeEach } from "vitest";
import { appRouter } from "./routers";
import type { TrpcContext } from "./_core/context";
// ─── Helpers ─────────────────────────────────────────────────────────────────
// Fabricate a TrpcContext authenticated as an admin (no real session).
function makeAdminCtx(): TrpcContext {
  return {
    user: {
      id: 1,
      openId: "admin-test",
      email: "admin@itinova.fr",
      name: "Admin Test",
      loginMethod: "local",
      role: "admin",
      createdAt: new Date(),
      updatedAt: new Date(),
      lastSignedIn: new Date(),
    },
    // Minimal request/response doubles — only what the cookie helpers touch.
    req: { protocol: "https", headers: {} } as TrpcContext["req"],
    res: {
      clearCookie: vi.fn(),
      cookie: vi.fn(),
    } as unknown as TrpcContext["res"],
  };
}
// Fabricate a TrpcContext authenticated as a standard (non-admin) user.
function makeUserCtx(): TrpcContext {
  return {
    user: {
      id: 2,
      openId: "user-test",
      email: "user@itinova.fr",
      name: "User Test",
      loginMethod: "local",
      role: "user",
      createdAt: new Date(),
      updatedAt: new Date(),
      lastSignedIn: new Date(),
    },
    // Minimal request/response doubles — only what the cookie helpers touch.
    req: { protocol: "https", headers: {} } as TrpcContext["req"],
    res: {
      clearCookie: vi.fn(),
      cookie: vi.fn(),
    } as unknown as TrpcContext["res"],
  };
}
// Fabricate an unauthenticated TrpcContext (user = null).
function makeAnonCtx(): TrpcContext {
  return {
    user: null,
    req: { protocol: "https", headers: {} } as TrpcContext["req"],
    res: {
      clearCookie: vi.fn(),
      cookie: vi.fn(),
    } as unknown as TrpcContext["res"],
  };
}
// ─── Tests Auth ───────────────────────────────────────────────────────────────
describe("auth.logout", () => {
  it("efface le cookie de session et retourne success", async () => {
    // Build the context directly (the original destructured it out of a
    // throwaway wrapper object for no benefit).
    const ctx = makeAdminCtx();
    const clearedCookies: string[] = [];
    ctx.res.clearCookie = (name: string) => { clearedCookies.push(name); };
    const caller = appRouter.createCaller(ctx);
    const result = await caller.auth.logout();
    expect(result.success).toBe(true);
    expect(clearedCookies.length).toBeGreaterThan(0);
  });
  it("retourne l'utilisateur connecté via auth.me", async () => {
    const ctx = makeAdminCtx();
    const caller = appRouter.createCaller(ctx);
    const user = await caller.auth.me();
    expect(user).not.toBeNull();
    expect(user?.email).toBe("admin@itinova.fr");
  });
  it("retourne null pour un utilisateur non connecté", async () => {
    const ctx = makeAnonCtx();
    const caller = appRouter.createCaller(ctx);
    const user = await caller.auth.me();
    expect(user).toBeNull();
  });
});
// ─── Tests protection admin ───────────────────────────────────────────────────
describe("protection admin", () => {
  // Every admin-only procedure must reject a plain authenticated user.
  it("refuse l'accès aux logs pour un utilisateur non admin", async () => {
    const ctx = makeUserCtx();
    const caller = appRouter.createCaller(ctx);
    await expect(caller.import.logs({ page: 1, pageSize: 10 })).rejects.toThrow();
  });
  it("refuse l'accès aux paramètres pour un utilisateur non admin", async () => {
    const ctx = makeUserCtx();
    const caller = appRouter.createCaller(ctx);
    await expect(caller.settings.get()).rejects.toThrow();
  });
  it("refuse la gestion des utilisateurs pour un non admin", async () => {
    const ctx = makeUserCtx();
    const caller = appRouter.createCaller(ctx);
    await expect(caller.users.list()).rejects.toThrow();
  });
});
// ─── Tests accès public ───────────────────────────────────────────────────────
describe("accès public", () => {
  // Read-only veille/AAP endpoints must work for anonymous visitors.
  it("veille.list est accessible sans authentification", async () => {
    const ctx = makeAnonCtx();
    const caller = appRouter.createCaller(ctx);
    // Should not throw UNAUTHORIZED.
    const result = await caller.veille.list({ page: 1, pageSize: 10 });
    expect(result).toHaveProperty("items");
    expect(result).toHaveProperty("total");
  });
  it("aap.list est accessible sans authentification", async () => {
    const ctx = makeAnonCtx();
    const caller = appRouter.createCaller(ctx);
    const result = await caller.aap.list({ page: 1, pageSize: 10 });
    expect(result).toHaveProperty("items");
    expect(result).toHaveProperty("total");
  });
  it("veille.filters est accessible sans authentification", async () => {
    const ctx = makeAnonCtx();
    const caller = appRouter.createCaller(ctx);
    const result = await caller.veille.filters();
    expect(result).toHaveProperty("categories");
    expect(result).toHaveProperty("niveaux");
    expect(result).toHaveProperty("territoires");
  });
  it("aap.filters est accessible sans authentification", async () => {
    const ctx = makeAnonCtx();
    const caller = appRouter.createCaller(ctx);
    const result = await caller.aap.filters();
    expect(result).toHaveProperty("regions");
    expect(result).toHaveProperty("departements");
  });
});
// ─── Tests import (admin seulement) ──────────────────────────────────────────
describe("import.run protection", () => {
  // Triggering an import is admin-only: both anonymous and standard
  // users must be rejected.
  it("refuse l'import pour un utilisateur non authentifié", async () => {
    const ctx = makeAnonCtx();
    const caller = appRouter.createCaller(ctx);
    await expect(caller.import.run({ type: "all" })).rejects.toThrow();
  });
  it("refuse l'import pour un utilisateur standard", async () => {
    const ctx = makeUserCtx();
    const caller = appRouter.createCaller(ctx);
    await expect(caller.import.run({ type: "all" })).rejects.toThrow();
  });
});