Forum Moderators: phranque
<?php
// Front controller: the .htaccess rewrite sends extensionless URLs here as
// index.php?p=<page>. We map the page to its meta data, then include the
// matching PHP file between a shared header and footer.
$page = isset($_GET['p']) ? $_GET['p'] : 'home';

// SECURITY: never feed user input straight into include() — a request such as
// ?p=../../etc/passwd%00 or a remote URL becomes a local/remote file
// inclusion. Only pages in this whitelist may ever be included; anything
// else falls back to 'home'.
$allowed = array('home', 'about', 'contact', 'order', 'about/profiles');
if (!in_array($page, $allowed, true)) {
    $page = 'home';
}

switch ($page) {
/*----------------------- PAGES -----------------------------------*/
case 'about':
$title = 'about';
$keyword = 'some keywords';
$description = 'some description';
break;
case 'contact':
$title = 'contact';
$keyword = 'some keywords';
$description = 'some description';
break;
case 'order':
$title = 'order';
$keyword = 'some keywords';
$description = 'some description';
break;
case 'about/profiles':
// was 'order' — copy/paste bug; this page's title is 'profiles'
$title = 'profiles';
$keyword = 'some keywords';
$description = 'some description';
break;
default:
$title = 'home';
$keyword = 'some keywords';
$description = 'some description';
break;
}
include('include/header.php');
// $page is guaranteed to be one of the whitelisted names at this point.
include($page . '.php');
include('include/footer.php');
?> <a href="<?php echo $url; ?>/about">About</a>
<a href="<?php echo $url; ?>/about/profiles">Profiles</a>
<a href="<?php echo $url; ?>/home">Home</a>
<a href="<?php echo $url; ?>/contact">Contact</a>
<a href="<?php echo $url; ?>/order">Order</a>
<a href="<?php echo $url; ?>/about/profiles">Profiles</a>

RewriteEngine On
Rewritebase /
RewriteCond %{REQUEST_FILENAME} !-f
RewriteCond %{REQUEST_FILENAME} !-d
RewriteCond $1 !^robots\.txt$
RewriteRule ^([^/\.]+)/?$ index.php?p=$1 [L]
about/profiles.php
RewriteRule ^([^/.]+)/?$
RewriteCond $1 !^robots\.txt$
RewriteEngine On
Rewritebase /
RewriteCond %{REQUEST_FILENAME} !-f
RewriteCond %{REQUEST_FILENAME} !-d
RewriteCond %{REQUEST_URI} !^robots\.txt$
RewriteRule ^([^/]+)/? index.php?p=$1 [L]

I have 5 web pages at the root of my folder called, for example:
index.php
about.php
contact.php
order.php
home.php
about/profiles.php
<a href="<?php echo $url; ?>/about">About</a>
<a href="<?php echo $url; ?>/about/profiles">Profiles</a>
<a href="<?php echo $url; ?>/home">Home</a>
<a href="<?php echo $url; ?>/contact">Contact</a>
<a href="<?php echo $url; ?>/order">Order</a>
<a href="<?php echo $url; ?>/about/profiles">Profiles</a>
RewriteCond %{REQUEST_FILENAME} !-f
RewriteCond %{REQUEST_FILENAME} !-d
RewriteCond $1 !^robots\.txt$
RewriteRule ^([^/\.]+)/?$ index.php?p=$1 [L]

<Files "robots.txt">
Order Allow,Deny
Allow from all
</Files>

(index|about|contact|order|home|profiles)

You can set things up so that any request for a URL with an extension is served by a real file, and extensionless requests are rewritten to be handled by your PHP script.