Compare commits

..

20 Commits
1.17.0 ... dev

Author SHA1 Message Date
miloschwartz
e118e5b047 add list alises endpoint 2026-04-11 21:03:35 -07:00
miloschwartz
7e4e8ea266 add niceId to list user resources 2026-04-11 17:56:16 -07:00
Owen
2f386f8e47 Grandfather in old users 2026-04-11 16:59:43 -07:00
Owen
f4ea572f6b Fix #2828 2026-04-11 16:50:28 -07:00
Owen Schwartz
825df7da63 Merge pull request #2806 from jbelke/fix-invite-email-encoding
Fix invite email encoding
2026-04-11 16:37:49 -07:00
Owen Schwartz
cd34f0a7b0 Merge pull request #2799 from LaurenceJJones/fix/proxy-target-deletion
fix: use targetId as row identifier
2026-04-11 16:35:09 -07:00
Owen Schwartz
b1b22c439a Merge pull request #2825 from AdnanSilajdzic/fix/worldmap-hover-stuck-public
fix(analytics): prevent countries from getting stuck highlighted on world map
2026-04-11 16:32:32 -07:00
Owen
eac747849b Restrict namespaces to paid plans due to abuse 2026-04-11 14:22:00 -07:00
Adnan Silajdzic
1aedf9da0a fix(worldmap): avoid stuck country hover state 2026-04-10 14:37:48 +00:00
miloschwartz
840684aeba dont show wildcard in domain picker 2026-04-09 17:54:25 -04:00
miloschwartz
f57012eb90 dont show international domain warning when capital letter present 2026-04-09 17:06:04 -04:00
miloschwartz
34387d9859 simplify wildcard domain on non pangolin-dns 2026-04-09 17:04:28 -04:00
miloschwartz
80f5914fdd add pluto 2026-04-09 16:15:19 -04:00
miloschwartz
eaa70da4dd add pluto 2026-04-09 16:14:46 -04:00
Owen
466f137590 Fix migration by testing for orphans 2026-04-09 10:29:51 -04:00
Joshua Belke
028df8bf27 fix: remove encodeURIComponent from invite link email parameter
The @ symbol in email addresses was being encoded as %40 when
constructing invite URLs, causing broken or garbled links when
copied/shared by users.

- Remove encodeURIComponent(email) from server-side invite link
  construction in inviteUser.ts (both new invite and regenerate paths)
- Remove encodeURIComponent(email) from client-side redirect URLs in
  InviteStatusCard.tsx (login, signup, and useEffect redirect paths)
- Valid Zod-validated email addresses do not contain characters that
  require URL encoding for safe query parameter use (@ is permitted
  in query strings per RFC 3986 §3.4)
2026-04-07 14:58:27 -04:00
Owen
28ef5238c9 Add CODEOWNERS 2026-04-07 11:36:02 -04:00
Laurence
7d3d5b2b22 use targetid also on proxy create as that also has same issue 2026-04-06 14:17:04 +01:00
Laurence
81eba50c9a fix: use targetId as row identifier
fix: 2797
2026-04-06 14:03:33 +01:00
Owen
d948d2ec33 Try to prevent deadlocks 2026-04-03 22:55:04 -04:00
25 changed files with 628 additions and 160 deletions

1
.github/CODEOWNERS vendored Normal file
View File

@@ -0,0 +1 @@
* @oschwartz10612 @miloschwartz

View File

@@ -2113,9 +2113,11 @@
"addDomainToEnableCustomAuthPages": "Users will be able to access the organization's login page and complete resource authentication using this domain.", "addDomainToEnableCustomAuthPages": "Users will be able to access the organization's login page and complete resource authentication using this domain.",
"selectDomainForOrgAuthPage": "Select a domain for the organization's authentication page", "selectDomainForOrgAuthPage": "Select a domain for the organization's authentication page",
"domainPickerProvidedDomain": "Provided Domain", "domainPickerProvidedDomain": "Provided Domain",
"domainPickerFreeProvidedDomain": "Free Provided Domain", "domainPickerFreeProvidedDomain": "Provided Domain",
"domainPickerFreeDomainsPaidFeature": "Provided domains are a paid feature. Subscribe to get a domain included with your plan — no need to bring your own.",
"domainPickerVerified": "Verified", "domainPickerVerified": "Verified",
"domainPickerUnverified": "Unverified", "domainPickerUnverified": "Unverified",
"domainPickerManual": "Manual",
"domainPickerInvalidSubdomainStructure": "This subdomain contains invalid characters or structure. It will be sanitized automatically when you save.", "domainPickerInvalidSubdomainStructure": "This subdomain contains invalid characters or structure. It will be sanitized automatically when you save.",
"domainPickerError": "Error", "domainPickerError": "Error",
"domainPickerErrorLoadDomains": "Failed to load organization domains", "domainPickerErrorLoadDomains": "Failed to load organization domains",

View File

@@ -19,7 +19,8 @@ export enum TierFeature {
SshPam = "sshPam", SshPam = "sshPam",
FullRbac = "fullRbac", FullRbac = "fullRbac",
SiteProvisioningKeys = "siteProvisioningKeys", // handle downgrade by revoking keys if needed SiteProvisioningKeys = "siteProvisioningKeys", // handle downgrade by revoking keys if needed
SIEM = "siem" // handle downgrade by disabling SIEM integrations SIEM = "siem", // handle downgrade by disabling SIEM integrations
DomainNamespaces = "domainNamespaces" // handle downgrade by removing custom domain namespaces
} }
export const tierMatrix: Record<TierFeature, Tier[]> = { export const tierMatrix: Record<TierFeature, Tier[]> = {
@@ -56,5 +57,6 @@ export const tierMatrix: Record<TierFeature, Tier[]> = {
[TierFeature.SshPam]: ["tier1", "tier3", "enterprise"], [TierFeature.SshPam]: ["tier1", "tier3", "enterprise"],
[TierFeature.FullRbac]: ["tier1", "tier2", "tier3", "enterprise"], [TierFeature.FullRbac]: ["tier1", "tier2", "tier3", "enterprise"],
[TierFeature.SiteProvisioningKeys]: ["tier3", "enterprise"], [TierFeature.SiteProvisioningKeys]: ["tier3", "enterprise"],
[TierFeature.SIEM]: ["enterprise"] [TierFeature.SIEM]: ["enterprise"],
[TierFeature.DomainNamespaces]: ["tier1", "tier2", "tier3", "enterprise"]
}; };

View File

@@ -22,11 +22,15 @@ import { OpenAPITags, registry } from "@server/openApi";
import { db, domainNamespaces, resources } from "@server/db"; import { db, domainNamespaces, resources } from "@server/db";
import { inArray } from "drizzle-orm"; import { inArray } from "drizzle-orm";
import { CheckDomainAvailabilityResponse } from "@server/routers/domain/types"; import { CheckDomainAvailabilityResponse } from "@server/routers/domain/types";
import { build } from "@server/build";
import { isSubscribed } from "#private/lib/isSubscribed";
import { tierMatrix } from "@server/lib/billing/tierMatrix";
const paramsSchema = z.strictObject({}); const paramsSchema = z.strictObject({});
const querySchema = z.strictObject({ const querySchema = z.strictObject({
subdomain: z.string() subdomain: z.string(),
// orgId: build === "saas" ? z.string() : z.string().optional() // Required for saas, optional otherwise
}); });
registry.registerPath({ registry.registerPath({
@@ -58,6 +62,23 @@ export async function checkDomainNamespaceAvailability(
} }
const { subdomain } = parsedQuery.data; const { subdomain } = parsedQuery.data;
// if (
// build == "saas" &&
// !isSubscribed(orgId!, tierMatrix.domainNamespaces)
// ) {
// // return not available
// return response<CheckDomainAvailabilityResponse>(res, {
// data: {
// available: false,
// options: []
// },
// success: true,
// error: false,
// message: "Your current subscription does not support custom domain namespaces. Please upgrade to access this feature.",
// status: HttpCode.OK
// });
// }
const namespaces = await db.select().from(domainNamespaces); const namespaces = await db.select().from(domainNamespaces);
let possibleDomains = namespaces.map((ns) => { let possibleDomains = namespaces.map((ns) => {
const desired = `${subdomain}.${ns.domainNamespaceId}`; const desired = `${subdomain}.${ns.domainNamespaceId}`;

View File

@@ -22,6 +22,9 @@ import { eq, sql } from "drizzle-orm";
import logger from "@server/logger"; import logger from "@server/logger";
import { fromError } from "zod-validation-error"; import { fromError } from "zod-validation-error";
import { OpenAPITags, registry } from "@server/openApi"; import { OpenAPITags, registry } from "@server/openApi";
import { isSubscribed } from "#private/lib/isSubscribed";
import { build } from "@server/build";
import { tierMatrix } from "@server/lib/billing/tierMatrix";
const paramsSchema = z.strictObject({}); const paramsSchema = z.strictObject({});
@@ -37,7 +40,8 @@ const querySchema = z.strictObject({
.optional() .optional()
.default("0") .default("0")
.transform(Number) .transform(Number)
.pipe(z.int().nonnegative()) .pipe(z.int().nonnegative()),
// orgId: build === "saas" ? z.string() : z.string().optional() // Required for saas, optional otherwise
}); });
async function query(limit: number, offset: number) { async function query(limit: number, offset: number) {
@@ -99,6 +103,26 @@ export async function listDomainNamespaces(
); );
} }
// if (
// build == "saas" &&
// !isSubscribed(orgId!, tierMatrix.domainNamespaces)
// ) {
// return response<ListDomainNamespacesResponse>(res, {
// data: {
// domainNamespaces: [],
// pagination: {
// total: 0,
// limit,
// offset
// }
// },
// success: true,
// error: false,
// message: "No namespaces found. Your current subscription does not support custom domain namespaces. Please upgrade to access this feature.",
// status: HttpCode.OK
// });
// }
const domainNamespacesList = await query(limit, offset); const domainNamespacesList = await query(limit, offset);
const [{ count }] = await db const [{ count }] = await db

View File

@@ -440,6 +440,12 @@ authenticated.get(
resource.getUserResources resource.getUserResources
); );
authenticated.get(
"/org/:orgId/user-resource-aliases",
verifyOrgAccess,
resource.listUserResourceAliases
);
authenticated.get( authenticated.get(
"/org/:orgId/domains", "/org/:orgId/domains",
verifyOrgAccess, verifyOrgAccess,

View File

@@ -1,6 +1,6 @@
import { db } from "@server/db"; import { db } from "@server/db";
import { sites, clients, olms } from "@server/db"; import { sites, clients, olms } from "@server/db";
import { eq, inArray } from "drizzle-orm"; import { inArray } from "drizzle-orm";
import logger from "@server/logger"; import logger from "@server/logger";
/** /**
@@ -21,7 +21,7 @@ import logger from "@server/logger";
*/ */
const FLUSH_INTERVAL_MS = 10_000; // Flush every 10 seconds const FLUSH_INTERVAL_MS = 10_000; // Flush every 10 seconds
const MAX_RETRIES = 2; const MAX_RETRIES = 5;
const BASE_DELAY_MS = 50; const BASE_DELAY_MS = 50;
// ── Site (newt) pings ────────────────────────────────────────────────── // ── Site (newt) pings ──────────────────────────────────────────────────
@@ -36,6 +36,14 @@ const pendingOlmArchiveResets: Set<string> = new Set();
let flushTimer: NodeJS.Timeout | null = null; let flushTimer: NodeJS.Timeout | null = null;
/**
* Guard that prevents two flush cycles from running concurrently.
* setInterval does not await async callbacks, so without this a slow flush
* (e.g. due to DB latency) would overlap with the next scheduled cycle and
* the two concurrent bulk UPDATEs would deadlock each other.
*/
let isFlushing = false;
// ── Public API ───────────────────────────────────────────────────────── // ── Public API ─────────────────────────────────────────────────────────
/** /**
@@ -72,6 +80,12 @@ export function recordClientPing(
/** /**
* Flush all accumulated site pings to the database. * Flush all accumulated site pings to the database.
*
* Each batch of up to BATCH_SIZE rows is written with a **single** UPDATE
* statement. We use the maximum timestamp across the batch so that `lastPing`
* reflects the most recent ping seen for any site in the group. This avoids
* the multi-statement transaction that previously created additional
* row-lock ordering hazards.
*/ */
async function flushSitePingsToDb(): Promise<void> { async function flushSitePingsToDb(): Promise<void> {
if (pendingSitePings.size === 0) { if (pendingSitePings.size === 0) {
@@ -83,55 +97,35 @@ async function flushSitePingsToDb(): Promise<void> {
const pingsToFlush = new Map(pendingSitePings); const pingsToFlush = new Map(pendingSitePings);
pendingSitePings.clear(); pendingSitePings.clear();
// Sort by siteId for consistent lock ordering (prevents deadlocks) const entries = Array.from(pingsToFlush.entries());
const sortedEntries = Array.from(pingsToFlush.entries()).sort(
([a], [b]) => a - b
);
const BATCH_SIZE = 50; const BATCH_SIZE = 50;
for (let i = 0; i < sortedEntries.length; i += BATCH_SIZE) { for (let i = 0; i < entries.length; i += BATCH_SIZE) {
const batch = sortedEntries.slice(i, i + BATCH_SIZE); const batch = entries.slice(i, i + BATCH_SIZE);
// Use the latest timestamp in the batch so that `lastPing` always
// moves forward. Using a single timestamp for the whole batch means
// we only ever need one UPDATE statement (no transaction).
const maxTimestamp = Math.max(...batch.map(([, ts]) => ts));
const siteIds = batch.map(([id]) => id);
try { try {
await withRetry(async () => { await withRetry(async () => {
// Group by timestamp for efficient bulk updates await db
const byTimestamp = new Map<number, number[]>(); .update(sites)
for (const [siteId, timestamp] of batch) { .set({
const group = byTimestamp.get(timestamp) || []; online: true,
group.push(siteId); lastPing: maxTimestamp
byTimestamp.set(timestamp, group); })
} .where(inArray(sites.siteId, siteIds));
if (byTimestamp.size === 1) {
const [timestamp, siteIds] = Array.from(
byTimestamp.entries()
)[0];
await db
.update(sites)
.set({
online: true,
lastPing: timestamp
})
.where(inArray(sites.siteId, siteIds));
} else {
await db.transaction(async (tx) => {
for (const [timestamp, siteIds] of byTimestamp) {
await tx
.update(sites)
.set({
online: true,
lastPing: timestamp
})
.where(inArray(sites.siteId, siteIds));
}
});
}
}, "flushSitePingsToDb"); }, "flushSitePingsToDb");
} catch (error) { } catch (error) {
logger.error( logger.error(
`Failed to flush site ping batch (${batch.length} sites), re-queuing for next cycle`, `Failed to flush site ping batch (${batch.length} sites), re-queuing for next cycle`,
{ error } { error }
); );
// Re-queue only if the preserved timestamp is newer than any
// update that may have landed since we snapshotted.
for (const [siteId, timestamp] of batch) { for (const [siteId, timestamp] of batch) {
const existing = pendingSitePings.get(siteId); const existing = pendingSitePings.get(siteId);
if (!existing || existing < timestamp) { if (!existing || existing < timestamp) {
@@ -144,6 +138,8 @@ async function flushSitePingsToDb(): Promise<void> {
/** /**
* Flush all accumulated client (OLM) pings to the database. * Flush all accumulated client (OLM) pings to the database.
*
* Same single-UPDATE-per-batch approach as `flushSitePingsToDb`.
*/ */
async function flushClientPingsToDb(): Promise<void> { async function flushClientPingsToDb(): Promise<void> {
if (pendingClientPings.size === 0 && pendingOlmArchiveResets.size === 0) { if (pendingClientPings.size === 0 && pendingOlmArchiveResets.size === 0) {
@@ -159,51 +155,25 @@ async function flushClientPingsToDb(): Promise<void> {
// ── Flush client pings ───────────────────────────────────────────── // ── Flush client pings ─────────────────────────────────────────────
if (pingsToFlush.size > 0) { if (pingsToFlush.size > 0) {
const sortedEntries = Array.from(pingsToFlush.entries()).sort( const entries = Array.from(pingsToFlush.entries());
([a], [b]) => a - b
);
const BATCH_SIZE = 50; const BATCH_SIZE = 50;
for (let i = 0; i < sortedEntries.length; i += BATCH_SIZE) { for (let i = 0; i < entries.length; i += BATCH_SIZE) {
const batch = sortedEntries.slice(i, i + BATCH_SIZE); const batch = entries.slice(i, i + BATCH_SIZE);
const maxTimestamp = Math.max(...batch.map(([, ts]) => ts));
const clientIds = batch.map(([id]) => id);
try { try {
await withRetry(async () => { await withRetry(async () => {
const byTimestamp = new Map<number, number[]>(); await db
for (const [clientId, timestamp] of batch) { .update(clients)
const group = byTimestamp.get(timestamp) || []; .set({
group.push(clientId); lastPing: maxTimestamp,
byTimestamp.set(timestamp, group); online: true,
} archived: false
})
if (byTimestamp.size === 1) { .where(inArray(clients.clientId, clientIds));
const [timestamp, clientIds] = Array.from(
byTimestamp.entries()
)[0];
await db
.update(clients)
.set({
lastPing: timestamp,
online: true,
archived: false
})
.where(inArray(clients.clientId, clientIds));
} else {
await db.transaction(async (tx) => {
for (const [timestamp, clientIds] of byTimestamp) {
await tx
.update(clients)
.set({
lastPing: timestamp,
online: true,
archived: false
})
.where(
inArray(clients.clientId, clientIds)
);
}
});
}
}, "flushClientPingsToDb"); }, "flushClientPingsToDb");
} catch (error) { } catch (error) {
logger.error( logger.error(
@@ -260,7 +230,12 @@ export async function flushPingsToDb(): Promise<void> {
/** /**
* Simple retry wrapper with exponential backoff for transient errors * Simple retry wrapper with exponential backoff for transient errors
* (connection timeouts, unexpected disconnects). * (deadlocks, connection timeouts, unexpected disconnects).
*
* PostgreSQL deadlocks (40P01) are always safe to retry: the database
* guarantees exactly one winner per deadlock pair, so the loser just needs
* to try again. MAX_RETRIES is intentionally higher than typical connection
* retry budgets to give deadlock victims enough chances to succeed.
*/ */
async function withRetry<T>( async function withRetry<T>(
operation: () => Promise<T>, operation: () => Promise<T>,
@@ -277,7 +252,8 @@ async function withRetry<T>(
const jitter = Math.random() * baseDelay; const jitter = Math.random() * baseDelay;
const delay = baseDelay + jitter; const delay = baseDelay + jitter;
logger.warn( logger.warn(
`Transient DB error in ${context}, retrying attempt ${attempt}/${MAX_RETRIES} after ${delay.toFixed(0)}ms` `Transient DB error in ${context}, retrying attempt ${attempt}/${MAX_RETRIES} after ${delay.toFixed(0)}ms`,
{ code: error?.code ?? error?.cause?.code }
); );
await new Promise((resolve) => setTimeout(resolve, delay)); await new Promise((resolve) => setTimeout(resolve, delay));
continue; continue;
@@ -288,14 +264,14 @@ async function withRetry<T>(
} }
/** /**
* Detect transient connection errors that are safe to retry. * Detect transient errors that are safe to retry.
*/ */
function isTransientError(error: any): boolean { function isTransientError(error: any): boolean {
if (!error) return false; if (!error) return false;
const message = (error.message || "").toLowerCase(); const message = (error.message || "").toLowerCase();
const causeMessage = (error.cause?.message || "").toLowerCase(); const causeMessage = (error.cause?.message || "").toLowerCase();
const code = error.code || ""; const code = error.code || error.cause?.code || "";
// Connection timeout / terminated // Connection timeout / terminated
if ( if (
@@ -308,12 +284,17 @@ function isTransientError(error: any): boolean {
return true; return true;
} }
// PostgreSQL deadlock // PostgreSQL deadlock detected — always safe to retry (one winner guaranteed)
if (code === "40P01" || message.includes("deadlock")) { if (code === "40P01" || message.includes("deadlock")) {
return true; return true;
} }
// ECONNRESET, ECONNREFUSED, EPIPE // PostgreSQL serialization failure
if (code === "40001") {
return true;
}
// ECONNRESET, ECONNREFUSED, EPIPE, ETIMEDOUT
if ( if (
code === "ECONNRESET" || code === "ECONNRESET" ||
code === "ECONNREFUSED" || code === "ECONNREFUSED" ||
@@ -337,12 +318,26 @@ export function startPingAccumulator(): void {
} }
flushTimer = setInterval(async () => { flushTimer = setInterval(async () => {
// Skip this tick if the previous flush is still in progress.
// setInterval does not await async callbacks, so without this guard
// two flush cycles can run concurrently and deadlock each other on
// overlapping bulk UPDATE statements.
if (isFlushing) {
logger.debug(
"Ping accumulator: previous flush still in progress, skipping cycle"
);
return;
}
isFlushing = true;
try { try {
await flushPingsToDb(); await flushPingsToDb();
} catch (error) { } catch (error) {
logger.error("Unhandled error in ping accumulator flush", { logger.error("Unhandled error in ping accumulator flush", {
error error
}); });
} finally {
isFlushing = false;
} }
}, FLUSH_INTERVAL_MS); }, FLUSH_INTERVAL_MS);
@@ -364,7 +359,22 @@ export async function stopPingAccumulator(): Promise<void> {
flushTimer = null; flushTimer = null;
} }
// Final flush to persist any remaining pings // Final flush to persist any remaining pings.
// Wait for any in-progress flush to finish first so we don't race.
if (isFlushing) {
logger.debug(
"Ping accumulator: waiting for in-progress flush before stopping…"
);
await new Promise<void>((resolve) => {
const poll = setInterval(() => {
if (!isFlushing) {
clearInterval(poll);
resolve();
}
}, 50);
});
}
try { try {
await flushPingsToDb(); await flushPingsToDb();
} catch (error) { } catch (error) {
@@ -379,4 +389,4 @@ export async function stopPingAccumulator(): Promise<void> {
*/ */
export function getPendingPingCount(): number { export function getPendingPingCount(): number {
return pendingSitePings.size + pendingClientPings.size; return pendingSitePings.size + pendingClientPings.size;
} }

View File

@@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express"; import { Request, Response, NextFunction } from "express";
import { z } from "zod"; import { z } from "zod";
import { db, loginPage } from "@server/db"; import { db, domainNamespaces, loginPage } from "@server/db";
import { import {
domains, domains,
orgDomains, orgDomains,
@@ -24,6 +24,8 @@ import { build } from "@server/build";
import { createCertificate } from "#dynamic/routers/certificates/createCertificate"; import { createCertificate } from "#dynamic/routers/certificates/createCertificate";
import { getUniqueResourceName } from "@server/db/names"; import { getUniqueResourceName } from "@server/db/names";
import { validateAndConstructDomain } from "@server/lib/domainUtils"; import { validateAndConstructDomain } from "@server/lib/domainUtils";
import { isSubscribed } from "#dynamic/lib/isSubscribed";
import { tierMatrix } from "@server/lib/billing/tierMatrix";
const createResourceParamsSchema = z.strictObject({ const createResourceParamsSchema = z.strictObject({
orgId: z.string() orgId: z.string()
@@ -112,7 +114,10 @@ export async function createResource(
const { orgId } = parsedParams.data; const { orgId } = parsedParams.data;
if (req.user && (!req.userOrgRoleIds || req.userOrgRoleIds.length === 0)) { if (
req.user &&
(!req.userOrgRoleIds || req.userOrgRoleIds.length === 0)
) {
return next( return next(
createHttpError(HttpCode.FORBIDDEN, "User does not have a role") createHttpError(HttpCode.FORBIDDEN, "User does not have a role")
); );
@@ -193,6 +198,29 @@ async function createHttpResource(
const subdomain = parsedBody.data.subdomain; const subdomain = parsedBody.data.subdomain;
const stickySession = parsedBody.data.stickySession; const stickySession = parsedBody.data.stickySession;
if (build == "saas" && !isSubscribed(orgId!, tierMatrix.domainNamespaces)) {
// grandfather in existing users
const lastAllowedDate = new Date("2026-04-12");
const userCreatedDate = new Date(req.user?.dateCreated || new Date());
if (userCreatedDate > lastAllowedDate) {
// check if this domain id is a namespace domain and if so, reject
const domain = await db
.select()
.from(domainNamespaces)
.where(eq(domainNamespaces.domainId, domainId))
.limit(1);
if (domain.length > 0) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Your current subscription does not support custom domain namespaces. Please upgrade to access this feature."
)
);
}
}
}
// Validate domain and construct full domain // Validate domain and construct full domain
const domainResult = await validateAndConstructDomain( const domainResult = await validateAndConstructDomain(
domainId, domainId,

View File

@@ -142,6 +142,7 @@ export async function getUserResources(
let siteResourcesData: Array<{ let siteResourcesData: Array<{
siteResourceId: number; siteResourceId: number;
name: string; name: string;
niceId: string;
destination: string; destination: string;
mode: string; mode: string;
protocol: string | null; protocol: string | null;
@@ -154,6 +155,7 @@ export async function getUserResources(
.select({ .select({
siteResourceId: siteResources.siteResourceId, siteResourceId: siteResources.siteResourceId,
name: siteResources.name, name: siteResources.name,
niceId: siteResources.niceId,
destination: siteResources.destination, destination: siteResources.destination,
mode: siteResources.mode, mode: siteResources.mode,
protocol: siteResources.protocol, protocol: siteResources.protocol,
@@ -249,7 +251,7 @@ export async function getUserResources(
}); });
return response(res, { return response(res, {
data: { data: {
resources: resourcesWithAuth, resources: resourcesWithAuth,
siteResources: siteResourcesFormatted siteResources: siteResourcesFormatted
}, },

View File

@@ -22,6 +22,7 @@ export * from "./deleteResourceRule";
export * from "./listResourceRules"; export * from "./listResourceRules";
export * from "./updateResourceRule"; export * from "./updateResourceRule";
export * from "./getUserResources"; export * from "./getUserResources";
export * from "./listUserResourceAliases";
export * from "./setResourceHeaderAuth"; export * from "./setResourceHeaderAuth";
export * from "./addEmailToResourceWhitelist"; export * from "./addEmailToResourceWhitelist";
export * from "./removeEmailFromResourceWhitelist"; export * from "./removeEmailFromResourceWhitelist";

View File

@@ -0,0 +1,262 @@
import { Request, Response, NextFunction } from "express";
import {
db,
siteResources,
userSiteResources,
roleSiteResources,
userOrgRoles,
userOrgs
} from "@server/db";
import { and, eq, inArray, asc, isNotNull, ne } from "drizzle-orm";
import createHttpError from "http-errors";
import HttpCode from "@server/types/HttpCode";
import response from "@server/lib/response";
import logger from "@server/logger";
import { z } from "zod";
import { fromZodError } from "zod-validation-error";
import type { PaginatedResponse } from "@server/types/Pagination";
import { OpenAPITags, registry } from "@server/openApi";
import { localCache } from "#dynamic/lib/cache";
const USER_RESOURCE_ALIASES_CACHE_TTL_SEC = 60;
/**
 * Build the local-cache key for one page of a user's accessible resource
 * aliases. Key shape: `userResourceAliases:<orgId>:<userId>:<page>:<pageSize>`
 * — one entry per (org, user, page, pageSize) tuple.
 */
function userResourceAliasesCacheKey(
    orgId: string,
    userId: string,
    page: number,
    pageSize: number
) {
    const segments = ["userResourceAliases", orgId, userId, page, pageSize];
    return segments.join(":");
}
// Route params: the org whose resources are being listed.
const listUserResourceAliasesParamsSchema = z.strictObject({
    orgId: z.string()
});

// Query params for pagination. Both fields fall back to their defaults via
// `.catch(...)` when the incoming value is missing or fails validation.
const listUserResourceAliasesQuerySchema = z.object({
    pageSize: z.coerce
        .number<string>()
        .int()
        .positive()
        .optional()
        .catch(20)
        .default(20)
        .openapi({
            type: "integer",
            default: 20,
            description: "Number of items per page"
        }),
    page: z.coerce
        .number<string>()
        .int()
        // FIX: was .min(0). Pages are 1-based — the handler computes
        // `offset(pageSize * (page - 1))`, so page=0 produced a negative
        // SQL OFFSET (rejected by Postgres). With .min(1), a page of 0
        // fails validation and `.catch(1)` coerces it to the first page.
        .min(1)
        .optional()
        .catch(1)
        .default(1)
        .openapi({
            type: "integer",
            default: 1,
            description: "Page number to retrieve"
        })
});
// Response payload: the flat list of alias hostnames the user can access,
// wrapped in the project's standard pagination envelope
// ({ total, pageSize, page } — see PaginatedResponse).
export type ListUserResourceAliasesResponse = PaginatedResponse<{
    aliases: string[];
}>;
// registry.registerPath({
// method: "get",
// path: "/org/{orgId}/user-resource-aliases",
// description:
// "List private (host-mode) site resource aliases the authenticated user can access in the organization, paginated.",
// tags: [OpenAPITags.PrivateResource],
// request: {
// params: z.object({
// orgId: z.string()
// }),
// query: listUserResourceAliasesQuerySchema
// },
// responses: {}
// });
/**
 * GET /org/:orgId/user-resource-aliases
 *
 * List the non-empty aliases of enabled, host-mode site resources that the
 * authenticated user can access in the given organization, paginated and
 * sorted alphabetically. Access is the union of resources granted to the
 * user directly (userSiteResources) and via any of the user's org roles
 * (roleSiteResources).
 *
 * Results are cached per (org, user, page, pageSize) for
 * USER_RESOURCE_ALIASES_CACHE_TTL_SEC seconds, so permission changes may
 * take up to that long to be reflected here.
 *
 * Errors: 400 on invalid query/params, 401 when unauthenticated, 403 when
 * the user is not a member of the org, 500 on unexpected failure.
 */
export async function listUserResourceAliases(
    req: Request,
    res: Response,
    next: NextFunction
): Promise<any> {
    try {
        const parsedQuery = listUserResourceAliasesQuerySchema.safeParse(
            req.query
        );
        if (!parsedQuery.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromZodError(parsedQuery.error)
                )
            );
        }
        const { page, pageSize } = parsedQuery.data;

        const parsedParams = listUserResourceAliasesParamsSchema.safeParse(
            req.params
        );
        if (!parsedParams.success) {
            return next(
                createHttpError(
                    HttpCode.BAD_REQUEST,
                    fromZodError(parsedParams.error)
                )
            );
        }
        const { orgId } = parsedParams.data;

        const userId = req.user?.userId;
        if (!userId) {
            return next(
                createHttpError(HttpCode.UNAUTHORIZED, "User not authenticated")
            );
        }

        // Membership check runs BEFORE the cache lookup so a user removed
        // from the org cannot read a previously cached page.
        const [userOrg] = await db
            .select()
            .from(userOrgs)
            .where(and(eq(userOrgs.userId, userId), eq(userOrgs.orgId, orgId)))
            .limit(1);
        if (!userOrg) {
            return next(
                createHttpError(HttpCode.FORBIDDEN, "User not in organization")
            );
        }

        const cacheKey = userResourceAliasesCacheKey(
            orgId,
            userId,
            page,
            pageSize
        );
        const cachedData: ListUserResourceAliasesResponse | undefined =
            localCache.get(cacheKey);
        if (cachedData) {
            // Cache hit — skip all resource queries below.
            return response<ListUserResourceAliasesResponse>(res, {
                data: cachedData,
                success: true,
                error: false,
                message: "User resource aliases retrieved successfully",
                status: HttpCode.OK
            });
        }

        // Roles the user holds in this org; used to pick up role-granted
        // resource access in addition to direct grants.
        const userRoleIds = await db
            .select({ roleId: userOrgRoles.roleId })
            .from(userOrgRoles)
            .where(
                and(
                    eq(userOrgRoles.userId, userId),
                    eq(userOrgRoles.orgId, orgId)
                )
            )
            .then((rows) => rows.map((r) => r.roleId));

        // Resources granted to the user directly.
        const directSiteResourcesQuery = db
            .select({ siteResourceId: userSiteResources.siteResourceId })
            .from(userSiteResources)
            .where(eq(userSiteResources.userId, userId));

        // Resources granted via roles. Skip the query entirely (empty
        // result) when the user has no roles — inArray([]) would be invalid.
        const roleSiteResourcesQuery =
            userRoleIds.length > 0
                ? db
                      .select({
                          siteResourceId: roleSiteResources.siteResourceId
                      })
                      .from(roleSiteResources)
                      .where(inArray(roleSiteResources.roleId, userRoleIds))
                : Promise.resolve([]);

        // Both grant queries are independent — run them in parallel.
        const [directSiteResourceResults, roleSiteResourceResults] =
            await Promise.all([
                directSiteResourcesQuery,
                roleSiteResourcesQuery
            ]);

        // Union of direct + role grants. May contain duplicate ids; that is
        // harmless inside the inArray() filter below.
        const accessibleSiteResourceIds = [
            ...directSiteResourceResults.map((r) => r.siteResourceId),
            ...roleSiteResourceResults.map((r) => r.siteResourceId)
        ];

        if (accessibleSiteResourceIds.length === 0) {
            // No grants at all: short-circuit with an empty page (and cache
            // it) rather than issuing an inArray() over an empty list.
            const data: ListUserResourceAliasesResponse = {
                aliases: [],
                pagination: {
                    total: 0,
                    pageSize,
                    page
                }
            };
            localCache.set(cacheKey, data, USER_RESOURCE_ALIASES_CACHE_TTL_SEC);
            return response<ListUserResourceAliasesResponse>(res, {
                data,
                success: true,
                error: false,
                message: "User resource aliases retrieved successfully",
                status: HttpCode.OK
            });
        }

        // Only enabled, host-mode resources in this org with a non-empty
        // alias, restricted to the ids the user may access.
        const whereClause = and(
            eq(siteResources.orgId, orgId),
            eq(siteResources.enabled, true),
            eq(siteResources.mode, "host"),
            isNotNull(siteResources.alias),
            ne(siteResources.alias, ""),
            inArray(siteResources.siteResourceId, accessibleSiteResourceIds)
        );

        // Factory so the same filtered select can back both the page query
        // and the total-count subquery.
        const baseSelect = () =>
            db
                .select({ alias: siteResources.alias })
                .from(siteResources)
                .where(whereClause);

        const countQuery = db.$count(baseSelect().as("filtered_aliases"));

        // Page rows and total count are independent — fetch in parallel.
        // Pages are 1-based, hence the (page - 1) offset.
        const [rows, totalCount] = await Promise.all([
            baseSelect()
                .orderBy(asc(siteResources.alias))
                .limit(pageSize)
                .offset(pageSize * (page - 1)),
            countQuery
        ]);

        // Safe cast: the whereClause excludes NULL/empty aliases.
        const aliases = rows.map((r) => r.alias as string);

        const data: ListUserResourceAliasesResponse = {
            aliases,
            pagination: {
                total: totalCount,
                pageSize,
                page
            }
        };
        localCache.set(cacheKey, data, USER_RESOURCE_ALIASES_CACHE_TTL_SEC);

        return response<ListUserResourceAliasesResponse>(res, {
            data,
            success: true,
            error: false,
            message: "User resource aliases retrieved successfully",
            status: HttpCode.OK
        });
    } catch (error) {
        logger.error(error);
        return next(
            createHttpError(
                HttpCode.INTERNAL_SERVER_ERROR,
                "Internal server error"
            )
        );
    }
}

View File

@@ -1,6 +1,6 @@
import { Request, Response, NextFunction } from "express"; import { Request, Response, NextFunction } from "express";
import { z } from "zod"; import { z } from "zod";
import { db, loginPage } from "@server/db"; import { db, domainNamespaces, loginPage } from "@server/db";
import { import {
domains, domains,
Org, Org,
@@ -25,6 +25,7 @@ import { validateAndConstructDomain } from "@server/lib/domainUtils";
import { build } from "@server/build"; import { build } from "@server/build";
import { isLicensedOrSubscribed } from "#dynamic/lib/isLicencedOrSubscribed"; import { isLicensedOrSubscribed } from "#dynamic/lib/isLicencedOrSubscribed";
import { tierMatrix } from "@server/lib/billing/tierMatrix"; import { tierMatrix } from "@server/lib/billing/tierMatrix";
import { isSubscribed } from "#dynamic/lib/isSubscribed";
const updateResourceParamsSchema = z.strictObject({ const updateResourceParamsSchema = z.strictObject({
resourceId: z.string().transform(Number).pipe(z.int().positive()) resourceId: z.string().transform(Number).pipe(z.int().positive())
@@ -120,7 +121,9 @@ const updateHttpResourceBodySchema = z
if (data.headers) { if (data.headers) {
// HTTP header values must be visible ASCII or horizontal whitespace, no control chars (RFC 7230) // HTTP header values must be visible ASCII or horizontal whitespace, no control chars (RFC 7230)
const validHeaderValue = /^[\t\x20-\x7E]*$/; const validHeaderValue = /^[\t\x20-\x7E]*$/;
return data.headers.every((h) => validHeaderValue.test(h.value)); return data.headers.every((h) =>
validHeaderValue.test(h.value)
);
} }
return true; return true;
}, },
@@ -318,6 +321,34 @@ async function updateHttpResource(
if (updateData.domainId) { if (updateData.domainId) {
const domainId = updateData.domainId; const domainId = updateData.domainId;
if (
build == "saas" &&
!isSubscribed(resource.orgId, tierMatrix.domainNamespaces)
) {
// grandfather in existing users
const lastAllowedDate = new Date("2026-04-12");
const userCreatedDate = new Date(
req.user?.dateCreated || new Date()
);
if (userCreatedDate > lastAllowedDate) {
// check if this domain id is a namespace domain and if so, reject
const domain = await db
.select()
.from(domainNamespaces)
.where(eq(domainNamespaces.domainId, domainId))
.limit(1);
if (domain.length > 0) {
return next(
createHttpError(
HttpCode.BAD_REQUEST,
"Your current subscription does not support custom domain namespaces. Please upgrade to access this feature."
)
);
}
}
}
// Validate domain and construct full domain // Validate domain and construct full domain
const domainResult = await validateAndConstructDomain( const domainResult = await validateAndConstructDomain(
domainId, domainId,
@@ -366,7 +397,7 @@ async function updateHttpResource(
); );
} }
} }
if (build != "oss") { if (build != "oss") {
const existingLoginPages = await db const existingLoginPages = await db
.select() .select()

View File

@@ -1,7 +1,14 @@
import { Request, Response, NextFunction } from "express"; import { Request, Response, NextFunction } from "express";
import { z } from "zod"; import { z } from "zod";
import { db } from "@server/db"; import { db } from "@server/db";
import { orgs, roles, userInviteRoles, userInvites, userOrgs, users } from "@server/db"; import {
orgs,
roles,
userInviteRoles,
userInvites,
userOrgs,
users
} from "@server/db";
import { and, eq, inArray } from "drizzle-orm"; import { and, eq, inArray } from "drizzle-orm";
import response from "@server/lib/response"; import response from "@server/lib/response";
import HttpCode from "@server/types/HttpCode"; import HttpCode from "@server/types/HttpCode";
@@ -37,8 +44,7 @@ const inviteUserBodySchema = z
regenerate: z.boolean().optional() regenerate: z.boolean().optional()
}) })
.refine( .refine(
(d) => (d) => (d.roleIds != null && d.roleIds.length > 0) || d.roleId != null,
(d.roleIds != null && d.roleIds.length > 0) || d.roleId != null,
{ message: "roleIds or roleId is required", path: ["roleIds"] } { message: "roleIds or roleId is required", path: ["roleIds"] }
) )
.transform((data) => ({ .transform((data) => ({
@@ -265,7 +271,7 @@ export async function inviteUser(
) )
); );
const inviteLink = `${config.getRawConfig().app.dashboard_url}/invite?token=${inviteId}-${token}&email=${encodeURIComponent(email)}`; const inviteLink = `${config.getRawConfig().app.dashboard_url}/invite?token=${inviteId}-${token}&email=${email}`;
if (doEmail) { if (doEmail) {
await sendEmail( await sendEmail(
@@ -314,12 +320,12 @@ export async function inviteUser(
expiresAt, expiresAt,
tokenHash tokenHash
}); });
await trx.insert(userInviteRoles).values( await trx
uniqueRoleIds.map((roleId) => ({ inviteId, roleId })) .insert(userInviteRoles)
); .values(uniqueRoleIds.map((roleId) => ({ inviteId, roleId })));
}); });
const inviteLink = `${config.getRawConfig().app.dashboard_url}/invite?token=${inviteId}-${token}&email=${encodeURIComponent(email)}`; const inviteLink = `${config.getRawConfig().app.dashboard_url}/invite?token=${inviteId}-${token}&email=${email}`;
if (doEmail) { if (doEmail) {
await sendEmail( await sendEmail(

View File

@@ -235,7 +235,9 @@ export default async function migration() {
for (const row of existingUserInviteRoles) { for (const row of existingUserInviteRoles) {
await db.execute(sql` await db.execute(sql`
INSERT INTO "userInviteRoles" ("inviteId", "roleId") INSERT INTO "userInviteRoles" ("inviteId", "roleId")
VALUES (${row.inviteId}, ${row.roleId}) SELECT ${row.inviteId}, ${row.roleId}
WHERE EXISTS (SELECT 1 FROM "userInvites" WHERE "inviteId" = ${row.inviteId})
AND EXISTS (SELECT 1 FROM "roles" WHERE "roleId" = ${row.roleId})
ON CONFLICT DO NOTHING ON CONFLICT DO NOTHING
`); `);
} }
@@ -258,7 +260,10 @@ export default async function migration() {
for (const row of existingUserOrgRoles) { for (const row of existingUserOrgRoles) {
await db.execute(sql` await db.execute(sql`
INSERT INTO "userOrgRoles" ("userId", "orgId", "roleId") INSERT INTO "userOrgRoles" ("userId", "orgId", "roleId")
VALUES (${row.userId}, ${row.orgId}, ${row.roleId}) SELECT ${row.userId}, ${row.orgId}, ${row.roleId}
WHERE EXISTS (SELECT 1 FROM "user" WHERE "id" = ${row.userId})
AND EXISTS (SELECT 1 FROM "orgs" WHERE "orgId" = ${row.orgId})
AND EXISTS (SELECT 1 FROM "roles" WHERE "roleId" = ${row.roleId})
ON CONFLICT DO NOTHING ON CONFLICT DO NOTHING
`); `);
} }

View File

@@ -145,7 +145,7 @@ export default async function migration() {
).run(); ).run();
db.prepare( db.prepare(
`INSERT INTO '__new_userOrgs'("userId", "orgId", "isOwner", "autoProvisioned", "pamUsername") SELECT "userId", "orgId", "isOwner", "autoProvisioned", "pamUsername" FROM 'userOrgs';` `INSERT INTO '__new_userOrgs'("userId", "orgId", "isOwner", "autoProvisioned", "pamUsername") SELECT "userId", "orgId", "isOwner", "autoProvisioned", "pamUsername" FROM 'userOrgs' WHERE EXISTS (SELECT 1 FROM 'user' WHERE id = userOrgs.userId) AND EXISTS (SELECT 1 FROM 'orgs' WHERE orgId = userOrgs.orgId);`
).run(); ).run();
db.prepare(`DROP TABLE 'userOrgs';`).run(); db.prepare(`DROP TABLE 'userOrgs';`).run();
db.prepare( db.prepare(
@@ -246,12 +246,15 @@ export default async function migration() {
// Re-insert the preserved invite role assignments into the new userInviteRoles table // Re-insert the preserved invite role assignments into the new userInviteRoles table
if (existingUserInviteRoles.length > 0) { if (existingUserInviteRoles.length > 0) {
const insertUserInviteRole = db.prepare( const insertUserInviteRole = db.prepare(
`INSERT OR IGNORE INTO 'userInviteRoles' ("inviteId", "roleId") VALUES (?, ?)` `INSERT OR IGNORE INTO 'userInviteRoles' ("inviteId", "roleId")
SELECT ?, ?
WHERE EXISTS (SELECT 1 FROM 'userInvites' WHERE inviteId = ?)
AND EXISTS (SELECT 1 FROM 'roles' WHERE roleId = ?)`
); );
const insertAll = db.transaction(() => { const insertAll = db.transaction(() => {
for (const row of existingUserInviteRoles) { for (const row of existingUserInviteRoles) {
insertUserInviteRole.run(row.inviteId, row.roleId); insertUserInviteRole.run(row.inviteId, row.roleId, row.inviteId, row.roleId);
} }
}); });
@@ -265,12 +268,16 @@ export default async function migration() {
// Re-insert the preserved role assignments into the new userOrgRoles table // Re-insert the preserved role assignments into the new userOrgRoles table
if (existingUserOrgRoles.length > 0) { if (existingUserOrgRoles.length > 0) {
const insertUserOrgRole = db.prepare( const insertUserOrgRole = db.prepare(
`INSERT OR IGNORE INTO 'userOrgRoles' ("userId", "orgId", "roleId") VALUES (?, ?, ?)` `INSERT OR IGNORE INTO 'userOrgRoles' ("userId", "orgId", "roleId")
SELECT ?, ?, ?
WHERE EXISTS (SELECT 1 FROM 'user' WHERE id = ?)
AND EXISTS (SELECT 1 FROM 'orgs' WHERE orgId = ?)
AND EXISTS (SELECT 1 FROM 'roles' WHERE roleId = ?)`
); );
const insertAll = db.transaction(() => { const insertAll = db.transaction(() => {
for (const row of existingUserOrgRoles) { for (const row of existingUserOrgRoles) {
insertUserOrgRole.run(row.userId, row.orgId, row.roleId); insertUserOrgRole.run(row.userId, row.orgId, row.roleId, row.userId, row.orgId, row.roleId);
} }
}); });

View File

@@ -10,6 +10,7 @@ import { authCookieHeader } from "@app/lib/api/cookies";
import { GetDNSRecordsResponse } from "@server/routers/domain"; import { GetDNSRecordsResponse } from "@server/routers/domain";
import DNSRecordsTable from "@app/components/DNSRecordTable"; import DNSRecordsTable from "@app/components/DNSRecordTable";
import DomainCertForm from "@app/components/DomainCertForm"; import DomainCertForm from "@app/components/DomainCertForm";
import { build } from "@server/build";
interface DomainSettingsPageProps { interface DomainSettingsPageProps {
params: Promise<{ domainId: string; orgId: string }>; params: Promise<{ domainId: string; orgId: string }>;
@@ -65,12 +66,14 @@ export default async function DomainSettingsPage({
)} )}
</div> </div>
<div className="space-y-6"> <div className="space-y-6">
<DomainInfoCard {build != "oss" && env.flags.usePangolinDns ? (
failed={domain.failed} <DomainInfoCard
verified={domain.verified} failed={domain.failed}
type={domain.type} verified={domain.verified}
errorMessage={domain.errorMessage} type={domain.type}
/> errorMessage={domain.errorMessage}
/>
) : null}
<DNSRecordsTable records={dnsRecords} type={domain.type} /> <DNSRecordsTable records={dnsRecords} type={domain.type} />

View File

@@ -678,6 +678,7 @@ function ProxyResourceTargetsForm({
getPaginationRowModel: getPaginationRowModel(), getPaginationRowModel: getPaginationRowModel(),
getSortedRowModel: getSortedRowModel(), getSortedRowModel: getSortedRowModel(),
getFilteredRowModel: getFilteredRowModel(), getFilteredRowModel: getFilteredRowModel(),
getRowId: (row) => String(row.targetId),
state: { state: {
pagination: { pagination: {
pageIndex: 0, pageIndex: 0,

View File

@@ -999,6 +999,7 @@ export default function Page() {
getPaginationRowModel: getPaginationRowModel(), getPaginationRowModel: getPaginationRowModel(),
getSortedRowModel: getSortedRowModel(), getSortedRowModel: getSortedRowModel(),
getFilteredRowModel: getFilteredRowModel(), getFilteredRowModel: getFilteredRowModel(),
getRowId: (row) => String(row.targetId),
state: { state: {
pagination: { pagination: {
pageIndex: 0, pageIndex: 0,

View File

@@ -154,7 +154,7 @@ export default function CreateDomainForm({
const punycodePreview = useMemo(() => { const punycodePreview = useMemo(() => {
if (!baseDomain) return ""; if (!baseDomain) return "";
const punycode = toPunycode(baseDomain); const punycode = toPunycode(baseDomain.toLowerCase());
return punycode !== baseDomain.toLowerCase() ? punycode : ""; return punycode !== baseDomain.toLowerCase() ? punycode : "";
}, [baseDomain]); }, [baseDomain]);
@@ -239,21 +239,24 @@ export default function CreateDomainForm({
className="space-y-4" className="space-y-4"
id="create-domain-form" id="create-domain-form"
> >
<FormField {build != "oss" && env.flags.usePangolinDns ? (
control={form.control} <FormField
name="type" control={form.control}
render={({ field }) => ( name="type"
<FormItem> render={({ field }) => (
<StrategySelect <FormItem>
options={domainOptions} <StrategySelect
defaultValue={field.value} options={domainOptions}
onChange={field.onChange} defaultValue={field.value}
cols={1} onChange={field.onChange}
/> cols={1}
<FormMessage /> />
</FormItem> <FormMessage />
)} </FormItem>
/> )}
/>
) : null}
<FormField <FormField
control={form.control} control={form.control}
name="baseDomain" name="baseDomain"

View File

@@ -319,6 +319,7 @@ export default function DeviceLoginForm({
<div className="flex justify-center"> <div className="flex justify-center">
<InputOTP <InputOTP
maxLength={9} maxLength={9}
pattern={REGEXP_ONLY_DIGITS_AND_CHARS}
{...field} {...field}
value={field.value value={field.value
.replace(/-/g, "") .replace(/-/g, "")

View File

@@ -2,6 +2,7 @@
import { Alert, AlertDescription } from "@/components/ui/alert"; import { Alert, AlertDescription } from "@/components/ui/alert";
import { Button } from "@/components/ui/button"; import { Button } from "@/components/ui/button";
import { Card, CardContent } from "@/components/ui/card";
import { import {
Command, Command,
CommandEmpty, CommandEmpty,
@@ -40,9 +41,12 @@ import {
Check, Check,
CheckCircle2, CheckCircle2,
ChevronsUpDown, ChevronsUpDown,
KeyRound,
Zap Zap
} from "lucide-react"; } from "lucide-react";
import { useTranslations } from "next-intl"; import { useTranslations } from "next-intl";
import { usePaidStatus } from "@/hooks/usePaidStatus";
import { TierFeature, tierMatrix } from "@server/lib/billing/tierMatrix";
import { toUnicode } from "punycode"; import { toUnicode } from "punycode";
import { useCallback, useEffect, useMemo, useState } from "react"; import { useCallback, useEffect, useMemo, useState } from "react";
@@ -95,6 +99,7 @@ export default function DomainPicker({
const { env } = useEnvContext(); const { env } = useEnvContext();
const api = createApiClient({ env }); const api = createApiClient({ env });
const t = useTranslations(); const t = useTranslations();
const { hasSaasSubscription } = usePaidStatus();
const { data = [], isLoading: loadingDomains } = useQuery( const { data = [], isLoading: loadingDomains } = useQuery(
orgQueries.domains({ orgId }) orgQueries.domains({ orgId })
@@ -509,9 +514,11 @@ export default function DomainPicker({
<span className="truncate"> <span className="truncate">
{selectedBaseDomain.domain} {selectedBaseDomain.domain}
</span> </span>
{selectedBaseDomain.verified && ( {selectedBaseDomain.verified &&
<CheckCircle2 className="h-3 w-3 text-green-500 shrink-0" /> selectedBaseDomain.domainType !==
)} "wildcard" && (
<CheckCircle2 className="h-3 w-3 text-green-500 shrink-0" />
)}
</div> </div>
) : ( ) : (
t("domainPickerSelectBaseDomain") t("domainPickerSelectBaseDomain")
@@ -574,14 +581,23 @@ export default function DomainPicker({
} }
</span> </span>
<span className="text-xs text-muted-foreground"> <span className="text-xs text-muted-foreground">
{orgDomain.type.toUpperCase()}{" "} {orgDomain.type ===
{" "} "wildcard"
{orgDomain.verified
? t( ? t(
"domainPickerVerified" "domainPickerManual"
) )
: t( : (
"domainPickerUnverified" <>
{orgDomain.type.toUpperCase()}{" "}
{" "}
{orgDomain.verified
? t(
"domainPickerVerified"
)
: t(
"domainPickerUnverified"
)}
</>
)} )}
</span> </span>
</div> </div>
@@ -680,6 +696,23 @@ export default function DomainPicker({
</div> </div>
</div> </div>
{build === "saas" &&
!hasSaasSubscription(
tierMatrix[TierFeature.DomainNamespaces]
) &&
!hideFreeDomain && (
<Card className="mt-3 border-black-500/30 bg-linear-to-br from-black-500/10 via-background to-background overflow-hidden">
<CardContent className="py-3 px-4">
<div className="flex items-center gap-2.5 text-sm text-muted-foreground">
<KeyRound className="size-4 shrink-0 text-black-500" />
<span>
{t("domainPickerFreeDomainsPaidFeature")}
</span>
</div>
</CardContent>
</Card>
)}
{/*showProvidedDomainSearch && build === "saas" && ( {/*showProvidedDomainSearch && build === "saas" && (
<Alert> <Alert>
<AlertCircle className="h-4 w-4" /> <AlertCircle className="h-4 w-4" />

View File

@@ -39,7 +39,11 @@ export default function InviteStatusCard({
const [loading, setLoading] = useState(true); const [loading, setLoading] = useState(true);
const [error, setError] = useState(""); const [error, setError] = useState("");
const [type, setType] = useState< const [type, setType] = useState<
"rejected" | "wrong_user" | "user_does_not_exist" | "not_logged_in" | "user_limit_exceeded" | "rejected"
| "wrong_user"
| "user_does_not_exist"
| "not_logged_in"
| "user_limit_exceeded"
>("rejected"); >("rejected");
useEffect(() => { useEffect(() => {
@@ -90,12 +94,12 @@ export default function InviteStatusCard({
if (!user && type === "user_does_not_exist") { if (!user && type === "user_does_not_exist") {
const redirectUrl = email const redirectUrl = email
? `/auth/signup?redirect=/invite?token=${tokenParam}&email=${encodeURIComponent(email)}` ? `/auth/signup?redirect=/invite?token=${tokenParam}&email=${email}`
: `/auth/signup?redirect=/invite?token=${tokenParam}`; : `/auth/signup?redirect=/invite?token=${tokenParam}`;
router.push(redirectUrl); router.push(redirectUrl);
} else if (!user && type === "not_logged_in") { } else if (!user && type === "not_logged_in") {
const redirectUrl = email const redirectUrl = email
? `/auth/login?redirect=/invite?token=${tokenParam}&email=${encodeURIComponent(email)}` ? `/auth/login?redirect=/invite?token=${tokenParam}&email=${email}`
: `/auth/login?redirect=/invite?token=${tokenParam}`; : `/auth/login?redirect=/invite?token=${tokenParam}`;
router.push(redirectUrl); router.push(redirectUrl);
} else { } else {
@@ -109,7 +113,7 @@ export default function InviteStatusCard({
async function goToLogin() { async function goToLogin() {
await api.post("/auth/logout", {}); await api.post("/auth/logout", {});
const redirectUrl = email const redirectUrl = email
? `/auth/login?redirect=/invite?token=${tokenParam}&email=${encodeURIComponent(email)}` ? `/auth/login?redirect=/invite?token=${tokenParam}&email=${email}`
: `/auth/login?redirect=/invite?token=${tokenParam}`; : `/auth/login?redirect=/invite?token=${tokenParam}`;
router.push(redirectUrl); router.push(redirectUrl);
} }
@@ -117,7 +121,7 @@ export default function InviteStatusCard({
async function goToSignup() { async function goToSignup() {
await api.post("/auth/logout", {}); await api.post("/auth/logout", {});
const redirectUrl = email const redirectUrl = email
? `/auth/signup?redirect=/invite?token=${tokenParam}&email=${encodeURIComponent(email)}` ? `/auth/signup?redirect=/invite?token=${tokenParam}&email=${email}`
: `/auth/signup?redirect=/invite?token=${tokenParam}`; : `/auth/signup?redirect=/invite?token=${tokenParam}`;
router.push(redirectUrl); router.push(redirectUrl);
} }
@@ -157,7 +161,9 @@ export default function InviteStatusCard({
Cannot Accept Invite Cannot Accept Invite
</p> </p>
<p className="text-center text-sm"> <p className="text-center text-sm">
This organization has reached its user limit. Please contact the organization administrator to upgrade their plan before accepting this invite. This organization has reached its user limit. Please
contact the organization administrator to upgrade their
plan before accepting this invite.
</p> </p>
</div> </div>
); );

View File

@@ -333,7 +333,8 @@ export default function PendingSitesTable({
"jupiter", "jupiter",
"saturn", "saturn",
"uranus", "uranus",
"neptune" "neptune",
"pluto"
].includes(originalRow.exitNodeName.toLowerCase()); ].includes(originalRow.exitNodeName.toLowerCase());
if (isCloudNode) { if (isCloudNode) {

View File

@@ -342,7 +342,8 @@ export default function SitesTable({
"jupiter", "jupiter",
"saturn", "saturn",
"uranus", "uranus",
"neptune" "neptune",
"pluto"
].includes(originalRow.exitNodeName.toLowerCase()); ].includes(originalRow.exitNodeName.toLowerCase());
if (isCloudNode) { if (isCloudNode) {

View File

@@ -164,7 +164,7 @@ const countryClass = cn(
const highlightedCountryClass = cn( const highlightedCountryClass = cn(
sharedCountryClass, sharedCountryClass,
"stroke-2", "stroke-[3]",
"fill-[#f4f4f5]", "fill-[#f4f4f5]",
"stroke-[#f36117]", "stroke-[#f36117]",
"dark:fill-[#3f3f46]" "dark:fill-[#3f3f46]"
@@ -194,11 +194,20 @@ function drawInteractiveCountries(
const path = setupProjetionPath(); const path = setupProjetionPath();
const data = parseWorldTopoJsonToGeoJsonFeatures(); const data = parseWorldTopoJsonToGeoJsonFeatures();
const svg = d3.select(element); const svg = d3.select(element);
const countriesLayer = svg.append("g");
const hoverLayer = svg.append("g").style("pointer-events", "none");
const hoverPath = hoverLayer
.append("path")
.datum(null)
.attr("class", highlightedCountryClass)
.style("display", "none");
svg.selectAll("path") countriesLayer
.selectAll("path")
.data(data) .data(data)
.enter() .enter()
.append("path") .append("path")
.attr("data-country-path", "true")
.attr("class", countryClass) .attr("class", countryClass)
.attr("d", path as never) .attr("d", path as never)
@@ -209,9 +218,10 @@ function drawInteractiveCountries(
y, y,
hoveredCountryAlpha3Code: country.properties.a3 hoveredCountryAlpha3Code: country.properties.a3
}); });
// brings country to front hoverPath
this.parentNode?.appendChild(this); .datum(country)
d3.select(this).attr("class", highlightedCountryClass); .attr("d", path(country) as string)
.style("display", null);
}) })
.on("mousemove", function (event) { .on("mousemove", function (event) {
@@ -221,7 +231,7 @@ function drawInteractiveCountries(
.on("mouseout", function () { .on("mouseout", function () {
setTooltip({ x: 0, y: 0, hoveredCountryAlpha3Code: null }); setTooltip({ x: 0, y: 0, hoveredCountryAlpha3Code: null });
d3.select(this).attr("class", countryClass); hoverPath.style("display", "none");
}); });
return svg; return svg;
@@ -257,7 +267,7 @@ function colorInCountriesWithValues(
const svg = d3.select(element); const svg = d3.select(element);
return svg return svg
.selectAll("path") .selectAll('path[data-country-path="true"]')
.style("fill", (countryPath) => { .style("fill", (countryPath) => {
const country = getCountryByCountryPath(countryPath); const country = getCountryByCountryPath(countryPath);
if (!country?.count) { if (!country?.count) {