For Workers & Pages, what is the name of the domain?
What is the issue or error you’re encountering?
cors
What steps have you taken to resolve the issue?
Access to resource at ‘https://kgrsse.v2ray-tokyo.workers.dev/results?batchGroupId=ddcce584-efbc-4eeb-84e5-3149234df38c’ from origin ‘https://wanghaisheng.github.io’ has been blocked by CORS policy: No ‘Access-Control-Allow-Origin’ header is present on the requested resource.
I have tried all kinds of approaches. The weird thing is that the submit endpoint has no CORS issue at all, but the results endpoint gets this error constantly. Any help would be appreciated.
What are the steps to reproduce the issue?
// Configuration Constants
// NOTE: the pasted version used typographic quotes (‘ ’), which are not valid
// JavaScript string delimiters and cause a syntax error; replaced with ASCII quotes.
const CONFIG = {
  cors: {
    // The single browser origin allowed to call this Worker.
    origin: 'https://wanghaisheng.github.io',
    headers: {
      'Access-Control-Allow-Origin': 'https://wanghaisheng.github.io',
      'Access-Control-Allow-Methods': 'GET, POST, OPTIONS',
      'Access-Control-Allow-Headers': 'Content-Type, Authorization',
      'Access-Control-Allow-Credentials': 'true'
    }
  },
  user: {
    login: 'wanghaisheng',
    targetDate: '2024-12-25 00:30:36'
  },
  sse: {
    retryInterval: 1000,      // ms the client should wait before reconnecting
    pollInterval: 1000,       // ms between R2 status polls
    keepaliveInterval: 15000  // ms between SSE keepalive comments
  },
  batch: {
    size: 10 // keywords per dispatched batch
  },
  debug: true
};
// Debug Logger
// Logs "[area][ISO timestamp] message" plus a structured data object,
// but only when CONFIG.debug is enabled.
// FIX: the pasted version lost the template-literal backticks, which is a
// syntax error; restored them here.
function debug(area, message, data = {}) {
  if (CONFIG.debug) {
    console.log(`[${area}][${new Date().toISOString()}] ${message}`, data);
  }
}
// Context Helper
// Builds a per-request context object: current ISO timestamp, the configured
// user login and target date, and a freshly generated request id.
function getContext() {
  const { login, targetDate } = CONFIG.user;
  const timestamp = new Date().toISOString();
  return {
    timestamp,
    user: login,
    requestId: crypto.randomUUID(),
    targetDate
  };
}
// Helper function to get headers
// Returns the CORS response headers for `origin`. With isSSE=true the
// Content-Type is text/event-stream (plus no-cache/keep-alive); otherwise
// application/json.
// FIX: the pasted version used typographic quotes and the single ellipsis
// character (…) instead of the spread operator (...) — both syntax errors.
function getHeaders(origin, isSSE = false) {
  const headers = {
    'Access-Control-Allow-Origin': origin,
    'Access-Control-Allow-Methods': 'GET, POST, OPTIONS',
    'Access-Control-Allow-Headers': 'Content-Type, Authorization',
    'Access-Control-Allow-Credentials': 'true'
  };
  if (isSSE) {
    return {
      ...headers,
      'Content-Type': 'text/event-stream',
      'Cache-Control': 'no-cache',
      'Connection': 'keep-alive'
    };
  }
  return {
    ...headers,
    'Content-Type': 'application/json'
  };
}
// Main Worker
export default {
  /**
   * Routes incoming requests:
   *   OPTIONS            -> 204 CORS preflight response
   *   POST /submit       -> handleSubmit (batch creation + workflow dispatch)
   *   GET  /results      -> handleSSE (requires ?batchGroupId=...)
   *   anything else      -> 404 JSON
   * Every response — including errors — carries CORS headers for the caller's
   * origin, so the browser never sees a response missing
   * Access-Control-Allow-Origin.
   * FIX: the pasted version used typographic quotes around 'Origin' (syntax error).
   */
  async fetch(request, env) {
    const url = new URL(request.url);
    // Echo the caller's Origin header; fall back to the configured default.
    const origin = request.headers.get('Origin') || CONFIG.cors.origin;
    const context = getContext();

    // Handle CORS preflight
    if (request.method === 'OPTIONS') {
      debug('CORS', 'Handling CORS preflight request');
      return new Response(null, {
        status: 204,
        headers: new Headers(getHeaders(origin))
      });
    }

    try {
      if (url.pathname === '/submit' && request.method === 'POST') {
        return await handleSubmit(request, env, origin);
      }
      if (url.pathname === '/results' && request.method === 'GET') {
        const batchGroupId = url.searchParams.get('batchGroupId');
        if (!batchGroupId) {
          return new Response(
            JSON.stringify({ error: 'BatchGroupId is required', ...context }),
            {
              status: 400,
              headers: new Headers(getHeaders(origin))
            }
          );
        }
        return await handleSSE(request, env, batchGroupId, origin);
      }
      return new Response(
        JSON.stringify({ error: 'Not found', ...context }),
        {
          status: 404,
          headers: new Headers(getHeaders(origin))
        }
      );
    } catch (error) {
      debug('Worker', 'Unhandled error', { error });
      return new Response(
        JSON.stringify({
          error: error.message || 'Internal Server Error',
          ...context
        }),
        {
          status: 500,
          headers: new Headers(getHeaders(origin))
        }
      );
    }
  }
};
// Submit Handler
// Accepts { keywords: string[] }, splits the keywords into batches of
// CONFIG.batch.size, persists the batch-group manifest to R2 under
// batch-groups/<batchGroupId>.json, then dispatches one GitHub Action per
// batch. Returns { status, batchGroupId, totalBatches } on success.
// FIX: the pasted version used typographic quotes and the ellipsis character
// (…) instead of the spread operator — both syntax errors.
async function handleSubmit(request, env, origin) {
  const context = getContext();
  debug('Submit', 'Processing submit request', context);
  try {
    const data = await request.json();
    const keywords = data.keywords;
    if (!Array.isArray(keywords) || keywords.length === 0) {
      throw new Error('Invalid keywords format');
    }

    // Create batches of at most CONFIG.batch.size keywords each.
    const batches = [];
    for (let i = 0; i < keywords.length; i += CONFIG.batch.size) {
      const batchKeywords = keywords.slice(i, i + CONFIG.batch.size);
      const batchId = crypto.randomUUID();
      batches.push({ batchId, keywords: batchKeywords });
    }

    // Create batch group manifest (progress is tracked via completedBatches).
    const batchGroupId = crypto.randomUUID();
    const batchGroup = {
      totalBatches: batches.length,
      batches: batches.map((b) => b.batchId),
      completedBatches: 0,
      ...context
    };
    await env.R2_BUCKET.put(
      `batch-groups/${batchGroupId}.json`,
      JSON.stringify(batchGroup)
    );

    // Trigger GitHub Actions sequentially, one dispatch per batch.
    for (const batch of batches) {
      await triggerGitHubAction(batch.keywords, batch.batchId, batchGroupId, env);
    }

    return new Response(
      JSON.stringify({
        status: 'success',
        batchGroupId,
        totalBatches: batches.length,
        ...context
      }),
      {
        status: 200,
        headers: new Headers(getHeaders(origin))
      }
    );
  } catch (error) {
    debug('Submit', 'Error processing submit', { error });
    return new Response(
      JSON.stringify({
        error: error.message || 'Internal Server Error',
        ...context
      }),
      {
        status: 500,
        headers: new Headers(getHeaders(origin))
      }
    );
  }
}
// SSE Handler
// Streams batch-group progress as Server-Sent Events: a retry hint, a
// 'connected' event, periodic 'progress' events built by polling R2, keepalive
// comments, and a final 'completed' event before closing the stream.
// FIXES vs. the pasted version:
//  - typographic quotes and the ellipsis character (…) replaced the spread
//    operator — syntax errors — restored to ASCII;
//  - the 500 error path returned a JSON body but with SSE
//    (text/event-stream) headers; it now sends JSON headers.
async function handleSSE(request, env, batchGroupId, origin) {
  const context = getContext();
  debug('SSE', 'Setting up SSE connection', { batchGroupId, origin });
  try {
    // Validate batch group before opening the stream.
    const batchGroup = await getBatchGroupStatus(env, batchGroupId);
    if (!batchGroup) {
      debug('SSE', 'Batch group not found', { batchGroupId });
      return new Response(
        JSON.stringify({ error: 'Batch group not found', ...context }),
        {
          status: 404,
          headers: new Headers(getHeaders(origin))
        }
      );
    }

    let isConnectionClosed = false;
    const stream = new TransformStream();
    const writer = stream.writable.getWriter();
    const encoder = new TextEncoder();

    // Helper for sending SSE "data:" messages; swallows write errors so a
    // broken pipe does not crash the poll loop.
    async function sendMessage(data) {
      if (isConnectionClosed) return;
      try {
        const message = JSON.stringify({
          ...data,
          timestamp: new Date().toISOString()
        });
        await writer.write(encoder.encode(`data: ${message}\n\n`));
        debug('SSE', 'Message sent', { data });
      } catch (error) {
        debug('SSE', 'Send error', { error });
      }
    }

    // Initialize SSE connection: reconnect hint + initial 'connected' event.
    await writer.write(encoder.encode(`retry: ${CONFIG.sse.retryInterval}\n\n`));
    await sendMessage({
      status: 'connected',
      batchGroupId,
      ...context
    });

    // Keepalive: SSE comment lines stop intermediaries from timing out.
    const keepaliveInterval = setInterval(async () => {
      if (!isConnectionClosed) {
        await writer.write(encoder.encode(': keepalive\n\n'));
      }
    }, CONFIG.sse.keepaliveInterval);

    // Polling: re-read the group manifest and per-batch results from R2,
    // push a 'progress' event, and finish when all batches are done.
    const pollInterval = setInterval(async () => {
      if (isConnectionClosed) return;
      try {
        const updatedBatchGroup = await getBatchGroupStatus(env, batchGroupId);
        if (!updatedBatchGroup) {
          throw new Error('Batch group not found');
        }
        const results = [];
        for (const batchId of updatedBatchGroup.batches) {
          const batchResults = await getResultsFromR2(env, batchId);
          if (batchResults) {
            results.push(...batchResults);
          }
        }
        await sendMessage({
          status: 'progress',
          completedBatches: updatedBatchGroup.completedBatches,
          totalBatches: updatedBatchGroup.totalBatches,
          results
        });
        if (updatedBatchGroup.completedBatches === updatedBatchGroup.totalBatches) {
          await sendMessage({ status: 'completed' });
          isConnectionClosed = true;
          clearInterval(keepaliveInterval);
          clearInterval(pollInterval);
          await writer.close();
        }
      } catch (error) {
        debug('SSE', 'Polling error', { error });
        await sendMessage({
          status: 'error',
          error: error.message
        });
      }
    }, CONFIG.sse.pollInterval);

    // Handle client disconnect: stop timers so the Worker can finish.
    request.signal.addEventListener('abort', () => {
      isConnectionClosed = true;
      clearInterval(keepaliveInterval);
      clearInterval(pollInterval);
    });

    return new Response(stream.readable, {
      headers: new Headers(getHeaders(origin, true))
    });
  } catch (error) {
    debug('SSE', 'Setup error', { error });
    // JSON error body, so use JSON (not SSE) headers.
    return new Response(
      JSON.stringify({
        error: error.message || 'Internal Server Error',
        ...context
      }),
      {
        status: 500,
        headers: new Headers(getHeaders(origin))
      }
    );
  }
}
// GitHub Action Trigger
// Dispatches the repo's sse.yml workflow (workflow_dispatch) with one batch of
// keywords. Requires env.GITHUB_TOKEN / GITHUB_OWNER / GITHUB_REPO; throws if
// configuration is missing or the dispatch call is not 2xx.
// FIX: the pasted version lost the backticks around the URL, the Authorization
// value, and the thrown error message — all template literals — and used
// typographic quotes; both are syntax errors.
async function triggerGitHubAction(keywords, batchId, batchGroupId, env) {
  debug('GitHub', 'Triggering action', { batchId, batchGroupId, keywords });
  const githubToken = env.GITHUB_TOKEN;
  const owner = env.GITHUB_OWNER;
  const repo = env.GITHUB_REPO;
  if (!githubToken || !owner || !repo) {
    throw new Error('Missing GitHub configuration');
  }
  const response = await fetch(
    `https://api.github.com/repos/${owner}/${repo}/actions/workflows/sse.yml/dispatches`,
    {
      method: 'POST',
      headers: {
        'Authorization': `Bearer ${githubToken}`,
        'User-Agent': 'CloudflareWorker/1.0',
        'Accept': 'application/vnd.github.v3+json',
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        ref: 'main',
        inputs: {
          // workflow_dispatch inputs must be strings, so join the array.
          keywords: keywords.join(','),
          batchId,
          batchGroupId
        },
      }),
    }
  );
  if (!response.ok) {
    const errorText = await response.text();
    debug('GitHub', 'Action dispatch failed', { status: response.status, errorText });
    throw new Error(`GitHub Action dispatch failed: ${response.status} - ${errorText}`);
  }
}
// R2 Operations
// Reads and parses batch-groups/<batchGroupId>.json from R2.
// Returns null when the object does not exist; rethrows read/parse errors so
// the caller can surface them.
// FIX: the pasted version lost the backticks around the R2 key template
// literal and used typographic quotes — syntax errors.
async function getBatchGroupStatus(env, batchGroupId) {
  try {
    const data = await env.R2_BUCKET.get(`batch-groups/${batchGroupId}.json`);
    if (!data) {
      debug('R2', 'Batch group not found', { batchGroupId });
      return null;
    }
    return JSON.parse(await data.text());
  } catch (error) {
    debug('R2', 'Read error', { error, batchGroupId });
    throw error;
  }
}
// Reads and parses results/<batchId>.json from R2.
// Returns null when the object is missing OR on read/parse error — results are
// best-effort, so the SSE poll loop keeps running (deliberately different from
// getBatchGroupStatus, which rethrows).
// FIX: the pasted version lost the backticks around the R2 key template
// literal and used typographic quotes — syntax errors.
async function getResultsFromR2(env, batchId) {
  try {
    const data = await env.R2_BUCKET.get(`results/${batchId}.json`);
    if (!data) {
      debug('R2', 'Results not found', { batchId });
      return null;
    }
    return JSON.parse(await data.text());
  } catch (error) {
    debug('R2', 'Results read error', { error, batchId });
    return null;
  }
}