2025-03-07 17:45:17 +08:00
commit 936af0c4ec
114 changed files with 37662 additions and 0 deletions

backend/dist/utils/clickhouse.js

@@ -0,0 +1,87 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.initClickHouse = void 0;
const client_1 = require("@clickhouse/client");
const config_1 = __importDefault(require("../config"));
// Create ClickHouse client with error handling
const createClickHouseClient = () => {
try {
return (0, client_1.createClient)({
host: `http://${config_1.default.clickhouse.host}:${config_1.default.clickhouse.port}`,
username: config_1.default.clickhouse.user,
password: config_1.default.clickhouse.password,
database: config_1.default.clickhouse.database,
});
}
catch (error) {
console.error('Error creating ClickHouse client:', error);
// Return a mock client for development that logs operations instead of executing them
return {
query: async ({ query, values }) => {
console.log('ClickHouse query (mock):', query, values);
return { rows: [] };
},
close: async () => {
console.log('ClickHouse connection closed (mock)');
}
};
}
};
const clickhouse = createClickHouseClient();
// Initialize ClickHouse database and tables
const initClickHouse = async () => {
try {
// Create database if not exists
await clickhouse.query({
query: `CREATE DATABASE IF NOT EXISTS ${config_1.default.clickhouse.database}`,
});
// Create tables for tracking events
await clickhouse.query({
query: `
CREATE TABLE IF NOT EXISTS ${config_1.default.clickhouse.database}.view_events (
user_id String,
content_id String,
timestamp DateTime DEFAULT now(),
ip String,
user_agent String
) ENGINE = MergeTree()
PARTITION BY toYYYYMM(timestamp)
ORDER BY (user_id, content_id, timestamp)
`,
});
await clickhouse.query({
query: `
CREATE TABLE IF NOT EXISTS ${config_1.default.clickhouse.database}.like_events (
user_id String,
content_id String,
timestamp DateTime DEFAULT now(),
action Enum('like' = 1, 'unlike' = 2)
) ENGINE = MergeTree()
PARTITION BY toYYYYMM(timestamp)
ORDER BY (user_id, content_id, timestamp)
`,
});
await clickhouse.query({
query: `
CREATE TABLE IF NOT EXISTS ${config_1.default.clickhouse.database}.follower_events (
follower_id String,
followed_id String,
timestamp DateTime DEFAULT now(),
action Enum('follow' = 1, 'unfollow' = 2)
) ENGINE = MergeTree()
PARTITION BY toYYYYMM(timestamp)
ORDER BY (follower_id, followed_id, timestamp)
`,
});
console.log('ClickHouse database and tables initialized');
}
catch (error) {
console.error('Error initializing ClickHouse:', error);
console.log('Continuing with limited functionality...');
}
};
exports.initClickHouse = initClickHouse;
exports.default = clickhouse;
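
A minimal usage sketch for the module above; the entry-point file and startup flow are illustrative assumptions, not part of this commit:

// Hypothetical startup wiring (e.g. in backend/dist/index.js):
// make sure the ClickHouse database and tables exist before serving traffic.
const { initClickHouse } = require('./utils/clickhouse');

const start = async () => {
    await initClickHouse(); // creates the database and event tables if missing
    console.log('ClickHouse ready, starting HTTP server...');
    // ... start the Express/HTTP server here
};

start().catch((err) => {
    console.error('Startup failed:', err);
    process.exit(1);
});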

backend/dist/utils/initDatabase.js

@@ -0,0 +1,492 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.initDatabase = exports.checkDatabaseConnection = exports.createSampleData = exports.initSupabaseFunctions = exports.initClickHouseTables = exports.initSupabaseTables = void 0;
const supabase_1 = __importDefault(require("./supabase"));
const clickhouse_1 = __importDefault(require("./clickhouse"));
const promises_1 = __importDefault(require("fs/promises"));
const path_1 = __importDefault(require("path"));
/**
* Initialize the Supabase (PostgreSQL) database tables
*/
const initSupabaseTables = async () => {
try {
console.log('Starting Supabase table initialization...');
// Create the user profiles (extension) table
await supabase_1.default.rpc('create_user_profiles_if_not_exists');
// Create the projects table
await supabase_1.default.rpc('create_projects_table_if_not_exists');
// Create the influencers table
await supabase_1.default.rpc('create_influencers_table_if_not_exists');
// Create the project-influencer join table
await supabase_1.default.rpc('create_project_influencers_table_if_not_exists');
// Create the posts table
await supabase_1.default.rpc('create_posts_table_if_not_exists');
// Create the comments table
await supabase_1.default.rpc('create_comments_table_if_not_exists');
// Create the project comments table
await supabase_1.default.rpc('create_project_comments_table_if_not_exists');
console.log('Supabase tables initialized');
return true;
}
catch (error) {
console.error('Failed to initialize Supabase tables:', error);
return false;
}
};
exports.initSupabaseTables = initSupabaseTables;
/**
* Initialize the ClickHouse database tables
*/
const initClickHouseTables = async () => {
try {
console.log('Starting ClickHouse table initialization...');
// Create the events table
await clickhouse_1.default.query({
query: `
CREATE TABLE IF NOT EXISTS events (
event_id UUID DEFAULT generateUUIDv4(),
project_id UUID,
influencer_id UUID,
post_id UUID NULL,
platform String,
event_type Enum(
'follower_change' = 1,
'post_like_change' = 2,
'post_view_change' = 3,
'click' = 4,
'comment' = 5,
'share' = 6,
'project_comment' = 7
),
metric_value Int64,
event_metadata String,
timestamp DateTime DEFAULT now()
) ENGINE = MergeTree()
PARTITION BY toYYYYMM(timestamp)
ORDER BY (platform, influencer_id, post_id, event_type, timestamp)
`
});
// Create materialized view for daily statistics
await clickhouse_1.default.query({
query: `
CREATE MATERIALIZED VIEW IF NOT EXISTS daily_stats
ENGINE = SummingMergeTree()
PARTITION BY toYYYYMM(date)
ORDER BY (date, platform, influencer_id, event_type)
AS SELECT
toDate(timestamp) AS date,
platform,
influencer_id,
event_type,
SUM(metric_value) AS total_value,
COUNT(*) AS event_count
FROM events
GROUP BY date, platform, influencer_id, event_type
`
});
// Create materialized view for monthly statistics
await clickhouse_1.default.query({
query: `
CREATE MATERIALIZED VIEW IF NOT EXISTS monthly_stats
ENGINE = SummingMergeTree()
ORDER BY (month, platform, influencer_id, event_type)
AS SELECT
toStartOfMonth(timestamp) AS month,
platform,
influencer_id,
event_type,
SUM(metric_value) AS total_value,
COUNT(*) AS event_count
FROM events
GROUP BY month, platform, influencer_id, event_type
`
});
// Create materialized view for post interaction statistics
await clickhouse_1.default.query({
query: `
CREATE MATERIALIZED VIEW IF NOT EXISTS post_interaction_stats
ENGINE = SummingMergeTree()
ORDER BY (post_id, event_type, date)
AS SELECT
post_id,
event_type,
toDate(timestamp) AS date,
SUM(metric_value) AS value,
COUNT(*) AS count
FROM events
WHERE post_id IS NOT NULL
GROUP BY post_id, event_type, date
`
});
// Create materialized view for project interaction statistics
await clickhouse_1.default.query({
query: `
CREATE MATERIALIZED VIEW IF NOT EXISTS project_interaction_stats
ENGINE = SummingMergeTree()
ORDER BY (project_id, event_type, date)
AS SELECT
project_id,
event_type,
toDate(timestamp) AS date,
SUM(metric_value) AS value,
COUNT(*) AS count
FROM events
WHERE project_id IS NOT NULL AND event_type = 'project_comment'
GROUP BY project_id, event_type, date
`
});
console.log('ClickHouse tables initialized');
return true;
}
catch (error) {
console.error('Failed to initialize ClickHouse tables:', error);
return false;
}
};
exports.initClickHouseTables = initClickHouseTables;
/**
* Initialize the Supabase stored functions
*/
const initSupabaseFunctions = async () => {
try {
console.log('Starting Supabase stored procedure initialization...');
// Stored procedure for creating the user profiles table
await supabase_1.default.rpc('create_function_create_user_profiles_if_not_exists');
// Stored procedure for creating the projects table
await supabase_1.default.rpc('create_function_create_projects_table_if_not_exists');
// Stored procedure for creating the influencers table
await supabase_1.default.rpc('create_function_create_influencers_table_if_not_exists');
// Stored procedure for creating the project-influencer join table
await supabase_1.default.rpc('create_function_create_project_influencers_table_if_not_exists');
// Stored procedure for creating the posts table
await supabase_1.default.rpc('create_function_create_posts_table_if_not_exists');
// Stored procedure for creating the comments table
await supabase_1.default.rpc('create_function_create_comments_table_if_not_exists');
// Stored procedure for creating the project comments table
await supabase_1.default.rpc('create_function_create_project_comments_table_if_not_exists');
// Create the comment-related SQL functions
console.log('Creating comment-related SQL functions...');
const commentsSQL = await promises_1.default.readFile(path_1.default.join(__dirname, 'supabase-comments-functions.sql'), 'utf8');
// Execute the SQL through Supabase
const { error: commentsFunctionsError } = await supabase_1.default.rpc('pgclient_execute', { query: commentsSQL });
if (commentsFunctionsError) {
console.error('Failed to create comment SQL functions:', commentsFunctionsError);
}
else {
console.log('Comment SQL functions created successfully');
}
console.log('Supabase stored procedures initialized');
return true;
}
catch (error) {
console.error('Failed to initialize Supabase stored procedures:', error);
return false;
}
};
exports.initSupabaseFunctions = initSupabaseFunctions;
/**
* Create sample (test) data
*/
const createSampleData = async () => {
try {
console.log('Creating sample data...');
// Create a test user
const { data: user, error: userError } = await supabase_1.default.auth.admin.createUser({
email: 'test@example.com',
password: 'password123',
user_metadata: {
full_name: 'Test User'
}
});
if (userError) {
console.error('Failed to create test user:', userError);
return false;
}
// Create a test project
const { data: project, error: projectError } = await supabase_1.default
.from('projects')
.insert({
name: 'Test Marketing Campaign',
description: 'This is a test marketing campaign',
created_by: user.user.id
})
.select()
.single();
if (projectError) {
console.error('Failed to create test project:', projectError);
return false;
}
// Create project comments
await supabase_1.default
.from('project_comments')
.insert([
{
project_id: project.id,
user_id: user.user.id,
content: 'This is a test comment on the project',
sentiment_score: 0.8
},
{
project_id: project.id,
user_id: user.user.id,
content: 'This project looks very promising',
sentiment_score: 0.9
},
{
project_id: project.id,
user_id: user.user.id,
content: 'Need to keep an eye on the progress of this project',
sentiment_score: 0.7
}
]);
// Create test influencers
const platforms = ['youtube', 'instagram', 'tiktok'];
const influencers = [];
for (let i = 1; i <= 10; i++) {
const platform = platforms[Math.floor(Math.random() * platforms.length)];
const { data: influencer, error: influencerError } = await supabase_1.default
.from('influencers')
.insert({
name: `Test Influencer ${i}`,
platform,
profile_url: `https://${platform}.com/user${i}`,
external_id: `user_${platform}_${i}`,
followers_count: Math.floor(Math.random() * 1000000) + 1000,
video_count: Math.floor(Math.random() * 500) + 10
})
.select()
.single();
if (influencerError) {
console.error(`Failed to create test influencer ${i}:`, influencerError);
continue;
}
influencers.push(influencer);
// Add the influencer to the project
await supabase_1.default
.from('project_influencers')
.insert({
project_id: project.id,
influencer_id: influencer.influencer_id
});
// Create 3-5 posts for each influencer
const postCount = Math.floor(Math.random() * 3) + 3;
for (let j = 1; j <= postCount; j++) {
const { data: post, error: postError } = await supabase_1.default
.from('posts')
.insert({
influencer_id: influencer.influencer_id,
platform,
post_url: `https://${platform}.com/user${i}/post${j}`,
title: `Test post ${j} - published by ${influencer.name}`,
description: `This is the description for test post ${j}`,
published_at: new Date(Date.now() - Math.floor(Math.random() * 30) * 24 * 60 * 60 * 1000).toISOString()
})
.select()
.single();
if (postError) {
console.error(`Failed to create test post ${j}:`, postError);
continue;
}
// Create 2-10 comments for each post
const commentCount = Math.floor(Math.random() * 9) + 2;
for (let k = 1; k <= commentCount; k++) {
await supabase_1.default
.from('comments')
.insert({
post_id: post.post_id,
user_id: user.user.id,
content: `Test comment ${k} on post ${post.title}`,
sentiment_score: (Math.random() * 2 - 1) // random number between -1 and 1
});
}
// Create ClickHouse event data
// Follower change event
await clickhouse_1.default.query({
query: `
INSERT INTO events (
project_id,
influencer_id,
platform,
event_type,
metric_value,
event_metadata
) VALUES (?, ?, ?, 'follower_change', ?, ?)
`,
values: [
project.id,
influencer.influencer_id,
platform,
Math.floor(Math.random() * 1000) - 200, // random number between -200 and 800
JSON.stringify({ source: 'api_crawler' })
]
});
// Post like change event
await clickhouse_1.default.query({
query: `
INSERT INTO events (
project_id,
influencer_id,
post_id,
platform,
event_type,
metric_value,
event_metadata
) VALUES (?, ?, ?, ?, 'post_like_change', ?, ?)
`,
values: [
project.id,
influencer.influencer_id,
post.post_id,
platform,
Math.floor(Math.random() * 500) + 10, // random number between 10 and 510
JSON.stringify({ source: 'api_crawler' })
]
});
// Post view change event
await clickhouse_1.default.query({
query: `
INSERT INTO events (
project_id,
influencer_id,
post_id,
platform,
event_type,
metric_value,
event_metadata
) VALUES (?, ?, ?, ?, 'post_view_change', ?, ?)
`,
values: [
project.id,
influencer.influencer_id,
post.post_id,
platform,
Math.floor(Math.random() * 5000) + 100, // random number between 100 and 5100
JSON.stringify({ source: 'api_crawler' })
]
});
// Interaction event
const interactionTypes = ['click', 'comment', 'share'];
const interactionType = interactionTypes[Math.floor(Math.random() * interactionTypes.length)];
await clickhouse_1.default.query({
query: `
INSERT INTO events (
project_id,
influencer_id,
post_id,
platform,
event_type,
metric_value,
event_metadata
) VALUES (?, ?, ?, ?, ?, ?, ?)
`,
values: [
project.id,
influencer.influencer_id,
post.post_id,
platform,
interactionType,
1,
JSON.stringify({
ip: '192.168.1.' + Math.floor(Math.random() * 255),
user_agent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
})
]
});
}
}
// Create project comment events
for (let i = 1; i <= 5; i++) {
await clickhouse_1.default.query({
query: `
INSERT INTO events (
project_id,
event_type,
metric_value,
event_metadata
) VALUES (?, 'project_comment', ?, ?)
`,
values: [
project.id,
1,
JSON.stringify({
user_id: user.user.id,
timestamp: new Date().toISOString(),
comment: `Project comment event ${i}`
})
]
});
}
console.log('Sample data created');
return true;
}
catch (error) {
console.error('Failed to create sample data:', error);
return false;
}
};
exports.createSampleData = createSampleData;
/**
* Check database connections
*/
const checkDatabaseConnection = async () => {
try {
console.log('Checking database connections...');
// Check the Supabase connection
try {
// Only verify that the connection works; no actual data query is executed
const { data, error } = await supabase_1.default.auth.getSession();
if (error) {
console.error('Supabase connection test failed:', error);
return false;
}
console.log('Supabase connection OK');
}
catch (supabaseError) {
console.error('Supabase connection test failed:', supabaseError);
return false;
}
// Check the ClickHouse connection
try {
// Use a simple query instead of the ping method
const result = await clickhouse_1.default.query({ query: 'SELECT 1' });
console.log('ClickHouse connection OK');
}
catch (error) {
console.error('ClickHouse connection test failed:', error);
return false;
}
console.log('Database connection check complete; all connections are healthy');
return true;
}
catch (error) {
console.error('Database connection check failed:', error);
return false;
}
};
exports.checkDatabaseConnection = checkDatabaseConnection;
/**
* Initialize the databases - this function now serves only as an entry point for manual initialization
* The actual initialization runs only when it is explicitly invoked through the admin API
*/
const initDatabase = async () => {
try {
console.log('Starting database initialization...');
console.log('Warning: this operation modifies the database schema; make sure you know what you are doing');
// Initialize Supabase functions
await (0, exports.initSupabaseFunctions)();
// Initialize Supabase tables
await (0, exports.initSupabaseTables)();
// Initialize ClickHouse tables
await (0, exports.initClickHouseTables)();
console.log('Database initialization complete');
return true;
}
catch (error) {
console.error('Database initialization failed:', error);
return false;
}
};
exports.initDatabase = initDatabase;
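
A note on the ClickHouse inserts in createSampleData above: the positional '?' placeholders and the values array are only echoed by the mock client in clickhouse.js; the official @clickhouse/client driver's query() does not substitute them. Below is a sketch of one follower_change event written with the driver's insert() helper instead, assuming the real (non-mock) client and that the configured database holds the events table:

// Sketch only - same data as the sample follower_change insert, via insert():
const clickhouse = require('./utils/clickhouse').default;

const recordFollowerChange = async (projectId, influencerId, platform, delta) => {
    await clickhouse.insert({
        table: 'events',
        values: [{
            project_id: projectId,
            influencer_id: influencerId,
            platform,
            event_type: 'follower_change',
            metric_value: delta,
            event_metadata: JSON.stringify({ source: 'api_crawler' }),
        }],
        format: 'JSONEachRow', // rows passed as plain objects
    });
};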

backend/dist/utils/queue.js

@@ -0,0 +1,158 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.addNotificationJob = exports.addAnalyticsJob = exports.initWorkers = exports.QUEUE_NAMES = void 0;
const bullmq_1 = require("bullmq");
const config_1 = __importDefault(require("../config"));
// Define queue names
exports.QUEUE_NAMES = {
ANALYTICS: 'analytics',
NOTIFICATIONS: 'notifications',
};
// Create Redis connection options
const redisOptions = {
host: config_1.default.bull.redis.host,
port: config_1.default.bull.redis.port,
password: config_1.default.bull.redis.password,
};
// Create queues with error handling
let analyticsQueue;
let notificationsQueue;
try {
analyticsQueue = new bullmq_1.Queue(exports.QUEUE_NAMES.ANALYTICS, {
connection: redisOptions,
defaultJobOptions: {
attempts: 3,
backoff: {
type: 'exponential',
delay: 1000,
},
},
});
notificationsQueue = new bullmq_1.Queue(exports.QUEUE_NAMES.NOTIFICATIONS, {
connection: redisOptions,
defaultJobOptions: {
attempts: 3,
backoff: {
type: 'exponential',
delay: 1000,
},
},
});
}
catch (error) {
console.error('Error initializing BullMQ queues:', error);
// Create mock queues for development
analyticsQueue = {
add: async (name, data) => {
console.log(`Mock analytics job added: ${name}`, data);
return { id: 'mock-job-id' };
},
close: async () => console.log('Mock analytics queue closed'),
};
notificationsQueue = {
add: async (name, data) => {
console.log(`Mock notification job added: ${name}`, data);
return { id: 'mock-job-id' };
},
close: async () => console.log('Mock notifications queue closed'),
};
}
// Initialize workers
const initWorkers = () => {
try {
// Analytics worker
const analyticsWorker = new bullmq_1.Worker(exports.QUEUE_NAMES.ANALYTICS, async (job) => {
console.log(`Processing analytics job ${job.id}`);
const { type, data } = job.data;
switch (type) {
case 'process_views':
// Process view analytics
console.log('Processing view analytics', data);
break;
case 'process_likes':
// Process like analytics
console.log('Processing like analytics', data);
break;
case 'process_followers':
// Process follower analytics
console.log('Processing follower analytics', data);
break;
default:
console.log(`Unknown analytics job type: ${type}`);
}
}, { connection: redisOptions });
// Notifications worker
const notificationsWorker = new bullmq_1.Worker(exports.QUEUE_NAMES.NOTIFICATIONS, async (job) => {
console.log(`Processing notification job ${job.id}`);
const { type, data } = job.data;
switch (type) {
case 'new_follower':
// Send new follower notification
console.log('Sending new follower notification', data);
break;
case 'new_like':
// Send new like notification
console.log('Sending new like notification', data);
break;
default:
console.log(`Unknown notification job type: ${type}`);
}
}, { connection: redisOptions });
// Handle worker events
analyticsWorker.on('completed', (job) => {
console.log(`Analytics job ${job.id} completed`);
});
analyticsWorker.on('failed', (job, err) => {
console.error(`Analytics job ${job?.id} failed with error ${err.message}`);
});
notificationsWorker.on('completed', (job) => {
console.log(`Notification job ${job.id} completed`);
});
notificationsWorker.on('failed', (job, err) => {
console.error(`Notification job ${job?.id} failed with error ${err.message}`);
});
return {
analyticsWorker,
notificationsWorker,
};
}
catch (error) {
console.error('Error initializing BullMQ workers:', error);
// Return mock workers
return {
analyticsWorker: {
close: async () => console.log('Mock analytics worker closed'),
},
notificationsWorker: {
close: async () => console.log('Mock notifications worker closed'),
},
};
}
};
exports.initWorkers = initWorkers;
// Helper function to add jobs to queues
const addAnalyticsJob = async (type, data, options = {}) => {
try {
return await analyticsQueue.add(type, { type, data }, options);
}
catch (error) {
console.error('Error adding analytics job:', error);
console.log('Job details:', { type, data });
return null;
}
};
exports.addAnalyticsJob = addAnalyticsJob;
const addNotificationJob = async (type, data, options = {}) => {
try {
return await notificationsQueue.add(type, { type, data }, options);
}
catch (error) {
console.error('Error adding notification job:', error);
console.log('Job details:', { type, data });
return null;
}
};
exports.addNotificationJob = addNotificationJob;
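
A short usage sketch for the queue helpers above; the calling context and payload fields are assumptions for illustration only:

const { initWorkers, addAnalyticsJob } = require('./utils/queue');

// Start the workers once at boot (real or mock, depending on Redis availability).
const workers = initWorkers();

// Enqueue a view-analytics job; 'process_views' matches a case in the analytics worker.
const trackView = async (userId, contentId) => {
    await addAnalyticsJob('process_views', { userId, contentId });
};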

backend/dist/utils/redis.js

@@ -0,0 +1,80 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getRedisClient = exports.connectRedis = exports.redisClient = void 0;
const redis_1 = require("redis");
const config_1 = __importDefault(require("../config"));
// Create Redis client
const redisClient = (0, redis_1.createClient)({
url: `redis://${config_1.default.redis.password ? `${config_1.default.redis.password}@` : ''}${config_1.default.redis.host}:${config_1.default.redis.port}`,
});
exports.redisClient = redisClient;
// Handle Redis connection errors
redisClient.on('error', (err) => {
console.error('Redis Client Error:', err);
});
// Create a mock Redis client for development when real connection fails
const createMockRedisClient = () => {
const store = new Map();
return {
isOpen: true,
connect: async () => console.log('Mock Redis client connected'),
get: async (key) => store.get(key) || null,
set: async (key, value) => {
store.set(key, value);
return 'OK';
},
incr: async (key) => {
const current = parseInt(store.get(key) || '0', 10);
const newValue = current + 1;
store.set(key, newValue.toString());
return newValue;
},
decr: async (key) => {
const current = parseInt(store.get(key) || '0', 10);
const newValue = Math.max(0, current - 1);
store.set(key, newValue.toString());
return newValue;
},
quit: async () => console.log('Mock Redis client disconnected'),
};
};
// Connect to Redis
let mockRedisClient = null;
const connectRedis = async () => {
try {
if (!redisClient.isOpen) {
await redisClient.connect();
console.log('Redis client connected');
}
return redisClient;
}
catch (error) {
console.error('Failed to connect to Redis:', error);
console.log('Using mock Redis client for development...');
if (!mockRedisClient) {
mockRedisClient = createMockRedisClient();
}
return mockRedisClient;
}
};
exports.connectRedis = connectRedis;
// Export the appropriate client
const getRedisClient = async () => {
try {
if (redisClient.isOpen) {
return redisClient;
}
return await connectRedis();
}
catch (error) {
if (!mockRedisClient) {
mockRedisClient = createMockRedisClient();
}
return mockRedisClient;
}
};
exports.getRedisClient = getRedisClient;
exports.default = redisClient;
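
A cache-aside sketch built on getRedisClient(); the key scheme and the loadFromDb callback are illustrative assumptions:

const { getRedisClient } = require('./utils/redis');

const getCachedProfile = async (userId, loadFromDb) => {
    const redis = await getRedisClient(); // real client, or in-memory mock if Redis is unreachable
    const key = `profile:${userId}`;
    const cached = await redis.get(key);
    if (cached) return JSON.parse(cached);
    const profile = await loadFromDb(userId);
    await redis.set(key, JSON.stringify(profile));
    return profile;
};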

backend/dist/utils/supabase.js

@@ -0,0 +1,18 @@
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const supabase_js_1 = require("@supabase/supabase-js");
const config_1 = __importDefault(require("../config"));
// Validate Supabase URL
const validateSupabaseUrl = (url) => {
if (!url || !url.startsWith('http')) {
console.warn('Invalid Supabase URL provided. Using a placeholder for development.');
return 'https://example.supabase.co';
}
return url;
};
// Create a single supabase client for interacting with your database
const supabase = (0, supabase_js_1.createClient)(validateSupabaseUrl(config_1.default.supabase.url), config_1.default.supabase.key || 'dummy-key');
exports.default = supabase;
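
An illustrative read through the client above; the projects table and created_by column appear in initDatabase.js in this commit, while the helper itself is only a sketch:

const supabase = require('./utils/supabase').default;

const listProjectsForUser = async (userId) => {
    const { data, error } = await supabase
        .from('projects')
        .select('*')
        .eq('created_by', userId);
    if (error) throw error;
    return data;
};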