SEO sitemap and robots.txt

commit e03e5aec23, parent 1129489750
6 changed files with 373 additions and 1 deletion
@@ -8,6 +8,7 @@ const { query, pool } = require('./db')
 const authMiddleware = require('./middleware/auth');
 const adminAuthMiddleware = require('./middleware/adminAuth');
 const settingsAdminRoutes = require('./routes/settingsAdmin');
+const seoMiddleware = require('./middleware/seoMiddleware');
 const SystemSettings = require('./models/SystemSettings');
 const fs = require('fs');
 // services
@@ -162,7 +163,7 @@ pool.connect()
     } catch (error) {
       console.error('Error processing low stock notifications:', error);
     }
-  }, timeInterval);
+  }, siteGeneratorInterval);
 }

 // Handle SSL proxy headers
@@ -174,6 +175,7 @@ app.use((req, res, next) => {
   next();
 });
 app.set('trust proxy', true);
+app.use(seoMiddleware);
 // Middleware
 app.use(cors({
   origin: '*',
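With the middleware registered ahead of the route handlers, both SEO paths should come back with their proper content types. A quick sanity check, as a sketch (Node 18+ built-in fetch; the localhost URL and port are assumptions, not part of the commit):

(async () => {
  for (const p of ['/sitemap.xml', '/robots.txt']) {
    const res = await fetch(`http://localhost:3000${p}`);
    // Expect 'application/xml' for the sitemap and 'text/plain' for robots.txt
    console.log(p, res.status, res.headers.get('content-type'));
  }
})();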
backend/src/middleware/seoMiddleware.js (new file, 19 lines)
@@ -0,0 +1,19 @@
+/**
+ * Middleware to handle serving SEO files with correct content types
+ */
+const seoMiddleware = (req, res, next) => {
+  if (req.path === '/sitemap.xml') {
+    res.setHeader('Content-Type', 'application/xml');
+    res.setHeader('Access-Control-Allow-Origin', '*');
+    res.setHeader('Access-Control-Allow-Methods', 'GET, HEAD');
+  }
+  else if (req.path === '/robots.txt') {
+    res.setHeader('Content-Type', 'text/plain');
+    res.setHeader('Access-Control-Allow-Origin', '*');
+    res.setHeader('Access-Control-Allow-Methods', 'GET, HEAD');
+  }
+
+  next();
+};
+
+module.exports = seoMiddleware;
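The middleware only sets headers and always calls next(); something further down the chain still has to send the file bytes. A minimal sketch of one way to wire that up, assuming the generator writes sitemap.xml and robots.txt into a public/ directory (the directory name is an assumption, not from the commit):

const express = require('express');
const path = require('path');
const seoMiddleware = require('./middleware/seoMiddleware');

const app = express();
app.use(seoMiddleware); // sets Content-Type and CORS headers for the two SEO paths
app.use(express.static(path.join(__dirname, 'public'))); // serves the files themselves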
@@ -8,6 +8,7 @@ import useBrandingSettings from '@hooks/brandingHooks';
 import imageUtils from '@utils/imageUtils';
 import Clarity from '@microsoft/clarity';
 import CookieConsentPopup from '@components/CookieConsentPopup';
+import SeoProxyRoutes from '@components/SeoProxyRoutes'; // Import SeoProxyRoutes

 // Import layouts
 import MainLayout from './layouts/MainLayout';
@@ -131,6 +132,10 @@ function App() {
       <Suspense fallback={<LoadingComponent />}>
         <Notifications />
         <CookieConsentPopup />
+
+        {/* SEO Routes for sitemap.xml and robots.txt */}
+        <SeoProxyRoutes />
+
         <Routes>
           {/* Main routes with MainLayout */}
           <Route path="/" element={<MainLayout />}>
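Since SeoProxyRoutes renders its own <Routes> alongside the main one, the two sets match the location independently: on /sitemap.xml or /robots.txt the SEO routes render SeoFile, while on every other path they match nothing and render null, leaving the main routes unaffected.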
frontend/src/components/SeoProxyRoutes.jsx (new file, 106 lines)
@@ -0,0 +1,106 @@
+import React, { useEffect, useState } from 'react';
+import { Routes, Route } from 'react-router-dom';
+import axiosClient from '@services/seoapi';
+
+/**
+ * Component to serve SEO files (sitemap.xml, robots.txt) directly from API
+ */
+const SeoFile = ({ filePath }) => {
+  const [content, setContent] = useState('');
+  const [contentType, setContentType] = useState('');
+  const [error, setError] = useState(null);
+
+  useEffect(() => {
+    // Determine the content type based on the file extension
+    const fileExtension = filePath.split('.').pop();
+    const type = fileExtension === 'xml' ? 'application/xml' : 'text/plain';
+    setContentType(type);
+
+    // Fetch the file from the API
+    axiosClient.get(filePath, {
+      responseType: 'text',
+      headers: {
+        'Accept': type
+      }
+    })
+    .then(response => {
+      setContent(response.data);
+    })
+    .catch(err => {
+      console.error(`Error fetching ${filePath}:`, err);
+      setError(`Error loading ${filePath}. ${err.message}`);
+    });
+  }, [filePath]);
+
+  // Set the content type and return the raw content
+  useEffect(() => {
+    if (content && contentType) {
+      // For XML content, we need to handle it differently than document.write
+      if (contentType.includes('xml')) {
+        // Clear the existing document content
+        document.body.innerHTML = '';
+        document.head.innerHTML = '';
+
+        // Set the XML MIME type
+        const meta = document.createElement('meta');
+        meta.httpEquiv = 'Content-Type';
+        meta.content = `${contentType}; charset=utf-8`;
+        document.head.appendChild(meta);
+
+        // Create a pre element to display the XML with proper formatting
+        const pre = document.createElement('pre');
+        pre.textContent = content;
+        document.body.appendChild(pre);
+
+        // For XML styling - optional but makes it look nicer
+        const style = document.createElement('style');
+        style.textContent = `
+          body {
+            font-family: monospace;
+            background: #282c34;
+            color: #abb2bf;
+            padding: 20px;
+          }
+          pre {
+            white-space: pre-wrap;
+            word-wrap: break-word;
+          }
+        `;
+        document.head.appendChild(style);
+      } else {
+        // For text content like robots.txt, use the standard approach
+        document.open();
+        document.write(content);
+        document.close();
+
+        // Set the correct content type
+        const meta = document.createElement('meta');
+        meta.httpEquiv = 'Content-Type';
+        meta.content = `${contentType}; charset=utf-8`;
+        document.head.appendChild(meta);
+      }
+    }
+  }, [content, contentType]);
+
+  // If there was an error, show a simple error message
+  if (error) {
+    return <div>{error}</div>;
+  }
+
+  // During loading, return nothing (blank page)
+  return null;
+};
+
+/**
+ * Routes component that handles SEO file requests
+ */
+const SeoProxyRoutes = () => {
+  return (
+    <Routes>
+      <Route path="/sitemap.xml" element={<SeoFile filePath="/sitemap.xml" />} />
+      <Route path="/robots.txt" element={<SeoFile filePath="/robots.txt" />} />
+    </Routes>
+  );
+};
+
+export default SeoProxyRoutes;
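One caveat worth noting: the document rewriting above only changes what a human sees in the browser. The HTTP response for /sitemap.xml is still the SPA's index.html served as text/html, so crawlers that do not execute JavaScript never see the XML. An alternative sketch (an assumption, not what this commit does) is to redirect the client to the backend copy, which carries the real headers set by seoMiddleware, reusing the same origin derivation as seoapi.js:

import { useEffect } from 'react';

// Hypothetical alternative to SeoFile: navigate to the backend-served file.
const RedirectingSeoFile = ({ filePath }) => {
  useEffect(() => {
    const apiOrigin = import.meta.env.VITE_API_URL.split('/api')[0];
    window.location.replace(`${apiOrigin}${filePath}`);
  }, [filePath]);
  return null; // nothing to render; the browser navigates away
};

export default RedirectingSeoFile;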
frontend/src/hooks/useSeoMeta.js (new file, 198 lines)
@@ -0,0 +1,198 @@
+import { useEffect, useState } from 'react';
+import { useLocation } from 'react-router-dom';
+import apiClient from '@services/api';
+
+/**
+ * Custom hook for managing SEO metadata
+ * @param {Object} options - Configuration options
+ * @param {string} options.title - Page title
+ * @param {string} options.description - Page description
+ * @param {string} options.image - Social media image URL
+ * @param {string} options.type - Open Graph type (article, website, etc.)
+ * @returns {Object} SEO utilities
+ */
+const useSeoMeta = (options = {}) => {
+  const location = useLocation();
+  const [isLoaded, setIsLoaded] = useState(false);
+
+  // Set default page metadata
+  useEffect(() => {
+    if (!options || isLoaded) return;
+
+    const {
+      title,
+      description,
+      image,
+      type = 'website'
+    } = options;
+
+    // Update document title
+    if (title) {
+      document.title = title;
+    }
+
+    // Update meta description
+    if (description) {
+      let metaDescription = document.querySelector('meta[name="description"]');
+      if (!metaDescription) {
+        metaDescription = document.createElement('meta');
+        metaDescription.name = 'description';
+        document.head.appendChild(metaDescription);
+      }
+      metaDescription.content = description;
+    }
+
+    // Set Open Graph meta tags
+    const updateMetaTag = (property, content) => {
+      if (!content) return;
+
+      let meta = document.querySelector(`meta[property="${property}"]`);
+      if (!meta) {
+        meta = document.createElement('meta');
+        meta.setAttribute('property', property);
+        document.head.appendChild(meta);
+      }
+      meta.content = content;
+    };
+
+    // Set canonical URL
+    const canonical = document.querySelector('link[rel="canonical"]');
+    const url = `${window.location.origin}${location.pathname}`;
+
+    if (!canonical) {
+      const link = document.createElement('link');
+      link.rel = 'canonical';
+      link.href = url;
+      document.head.appendChild(link);
+    } else {
+      canonical.href = url;
+    }
+
+    // Update Open Graph tags
+    if (title) updateMetaTag('og:title', title);
+    if (description) updateMetaTag('og:description', description);
+    if (image) updateMetaTag('og:image', image);
+    updateMetaTag('og:url', url);
+    updateMetaTag('og:type', type);
+
+    // Update Twitter Card tags
+    if (title) updateMetaTag('twitter:title', title);
+    if (description) updateMetaTag('twitter:description', description);
+    if (image) updateMetaTag('twitter:image', image);
+    updateMetaTag('twitter:card', image ? 'summary_large_image' : 'summary');
+
+    setIsLoaded(true);
+  }, [options, location.pathname, isLoaded]);
+
+  /**
+   * Function to fetch and insert structured data schema
+   * @param {string} type - Schema type (Product, Article, etc.)
+   * @param {Object} data - Schema data
+   */
+  const setSchema = (type, data) => {
+    // Remove any existing schema
+    const existingSchema = document.querySelector('script[type="application/ld+json"]');
+    if (existingSchema) {
+      existingSchema.remove();
+    }
+
+    // Create the schema based on type
+    let schema = {
+      '@context': 'https://schema.org',
+      '@type': type,
+      ...data
+    };
+
+    // Add the schema to the page
+    const script = document.createElement('script');
+    script.type = 'application/ld+json';
+    script.text = JSON.stringify(schema);
+    document.head.appendChild(script);
+  };
+
+  /**
+   * Generate breadcrumb schema based on current path
+   */
+  const setBreadcrumbSchema = () => {
+    const paths = location.pathname.split('/').filter(Boolean);
+
+    if (paths.length === 0) return; // Don't set breadcrumbs for homepage
+
+    const itemListElements = [];
+    let currentPath = '';
+
+    // Always add Home as the first item
+    itemListElements.push({
+      '@type': 'ListItem',
+      'position': 1,
+      'name': 'Home',
+      'item': `${window.location.origin}/`
+    });
+
+    // Add each path segment as a breadcrumb item
+    paths.forEach((path, index) => {
+      currentPath += `/${path}`;
+
+      // Format the name (capitalize, replace hyphens with spaces)
+      const name = path
+        .replace(/-/g, ' ')
+        .replace(/\b\w/g, char => char.toUpperCase());
+
+      itemListElements.push({
+        '@type': 'ListItem',
+        'position': index + 2, // +2 because Home is position 1
+        'name': name,
+        'item': `${window.location.origin}${currentPath}`
+      });
+    });
+
+    const breadcrumbSchema = {
+      '@context': 'https://schema.org',
+      '@type': 'BreadcrumbList',
+      'itemListElement': itemListElements
+    };
+
+    // Add breadcrumb schema to the page
+    const script = document.createElement('script');
+    script.type = 'application/ld+json';
+    script.text = JSON.stringify(breadcrumbSchema);
+    document.head.appendChild(script);
+  };
+
+  /**
+   * Checks if sitemap.xml exists and is accessible
+   * @returns {Promise<boolean>} - Whether sitemap exists
+   */
+  const checkSitemapExists = async () => {
+    try {
+      const response = await apiClient.head('/sitemap.xml');
+      return response.status === 200;
+    } catch (error) {
+      console.error('Error checking sitemap:', error);
+      return false;
+    }
+  };
+
+  /**
+   * Checks if robots.txt exists and is accessible
+   * @returns {Promise<boolean>} - Whether robots.txt exists
+   */
+  const checkRobotsTxtExists = async () => {
+    try {
+      const response = await apiClient.head('/robots.txt');
+      return response.status === 200;
+    } catch (error) {
+      console.error('Error checking robots.txt:', error);
+      return false;
+    }
+  };
+
+  return {
+    setSchema,
+    setBreadcrumbSchema,
+    checkSitemapExists,
+    checkRobotsTxtExists
+  };
+};
+
+export default useSeoMeta;
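A minimal usage sketch of the hook (ProductPage and the product fields are hypothetical; the hook API is as defined above). Because the effect guards on isLoaded, passing a freshly created options object on each render applies the tags only once:

import React, { useEffect } from 'react';
import useSeoMeta from '@hooks/useSeoMeta';

const ProductPage = ({ product }) => {
  const { setSchema, setBreadcrumbSchema } = useSeoMeta({
    title: product.name,
    description: product.summary,
    image: product.imageUrl,
    type: 'product'
  });

  useEffect(() => {
    // Structured data for rich results, plus breadcrumbs derived from the URL
    setSchema('Product', { name: product.name, image: product.imageUrl });
    setBreadcrumbSchema();
  }, [product]);

  return <h1>{product.name}</h1>;
};

export default ProductPage;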
frontend/src/services/seoapi.js (new file, 42 lines)
@@ -0,0 +1,42 @@
+import axios from 'axios';
+import { store } from '../store';
+
+// Create the base axios instance
+const axiosClient = axios.create({
+  baseURL: import.meta.env.VITE_API_URL.split('/api')[0],
+  headers: {
+    'Content-Type': 'application/json',
+  },
+});
+
+// Add request interceptor to include API key in headers if available
+axiosClient.interceptors.request.use(
+  (config) => {
+    const state = store.getState();
+    const apiKey = state.auth.apiKey;
+
+    if (apiKey) {
+      config.headers['X-API-Key'] = apiKey;
+    }
+
+    return config;
+  },
+  (error) => {
+    return Promise.reject(error);
+  }
+);
+
+// Add response interceptor to handle common errors
+axiosClient.interceptors.response.use(
+  (response) => response,
+  (error) => {
+    // Handle 401 unauthorized errors
+    if (error.response && error.response.status === 401) {
+      console.log("Missing Seo Files");
+    }
+
+    return Promise.reject(error);
+  }
+);
+
+export default axiosClient;
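Note the baseURL: splitting VITE_API_URL on '/api' points this client at the site origin rather than the API prefix, so with VITE_API_URL set to, say, https://example.com/api (a hypothetical value), axiosClient.get('/sitemap.xml') fetches https://example.com/sitemap.xml. A usage sketch mirroring the call in SeoProxyRoutes:

import axiosClient from '@services/seoapi';

axiosClient
  .get('/robots.txt', { responseType: 'text' })
  .then((res) => console.log(res.data))
  .catch((err) => console.error('robots.txt unavailable:', err.message));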