$output_url . $file_name,
'changefreq' => 'daily'
)
);
}
write_file($output_paths['index_file'], urlset($index_urls));
}
# Generate sitemap of standard site elements.
function standard_map() {
global $output_paths;
$standard_map_urls = url(
array(
'url' => common_local_url('public'),
'changefreq' => 'daily',
'priority' => '1',
)
);
$standard_map_urls .= url(
array(
'url' => common_local_url('publicrss'),
'changefreq' => 'daily',
'priority' => '0.3',
)
);
$docs = array('about', 'faq', 'contact', 'im', 'openid', 'openmublog', 'privacy', 'source');
foreach($docs as $title) {
$standard_map_urls .= url(
array(
'url' => common_local_url('doc', array('title' => $title)),
'changefreq' => 'monthly',
'priority' => '0.2',
)
);
}
$urlset_path = $output_paths['output_dir'] . 'standard.xml';
write_file($urlset_path, urlset($standard_map_urls));
}
# Generate sitemaps of all notices.
function notices_map() {
global $output_paths;
$notices = DB_DataObject::factory('notice');
$notices->query('SELECT uri, modified FROM notice');
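# DB_DataObject runs the raw query; each fetch() call below hydrates
# $notices with the next uri/modified row.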
$notice_count = 0;
$map_count = 1;
while ($notices->fetch()) {
# Maximum 50,000 URLs per sitemap file.
if ($notice_count == 50000) {
$notice_count = 0;
$map_count++;
}
$notice = array(
'url' => $notices->uri,
'lastmod' => common_date_w3dtf($notices->modified),
'changefreq' => 'daily',
'priority' => '1',
);
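# Append this notice's <url> element to the string for the current
# sitemap file; $notice_list is keyed by sitemap number.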
$notice_list[$map_count] .= url($notice);
$notice_count++;
}
# Make full sitemaps from the lists and save them.
array_to_map($notice_list, 'notice');
}
# Generate sitemaps of all users.
function user_map() {
global $output_paths;
$users = DB_DataObject::factory('user');
$users->query('SELECT id, nickname FROM user');
$user_count = 0;
$map_count = 1;
while ($users->fetch()) {
# Maximum 50,000 URLs per sitemap file.
if ($user_count == 50000) {
$user_count = 0;
$map_count++;
}
$user_args = array('nickname' => $users->nickname);
# Define parameters for generating <url> elements.
$user = array(
'url' => common_local_url('showstream', $user_args),
'changefreq' => 'daily',
'priority' => '1',
);
$user_rss = array(
'url' => common_local_url('userrss', $user_args),
'changefreq' => 'daily',
'priority' => '0.3',
);
$all = array(
'url' => common_local_url('all', $user_args),
'changefreq' => 'daily',
'priority' => '1',
);
$all_rss = array(
'url' => common_local_url('allrss', $user_args),
'changefreq' => 'daily',
'priority' => '0.3',
);
$replies = array(
'url' => common_local_url('replies', $user_args),
'changefreq' => 'daily',
'priority' => '1',
);
$replies_rss = array(
'url' => common_local_url('repliesrss', $user_args),
'changefreq' => 'daily',
'priority' => '0.3',
);
$foaf = array(
'url' => common_local_url('foaf', $user_args),
'changefreq' => 'weekly',
'priority' => '0.5',
);
# Construct a <url> element for each user facet and add it
# to our existing list of those.
$user_list[$map_count] .= url($user);
$user_rss_list[$map_count] .= url($user_rss);
$all_list[$map_count] .= url($all);
$all_rss_list[$map_count] .= url($all_rss);
$replies_list[$map_count] .= url($replies);
$replies_rss_list[$map_count] .= url($replies_rss);
$foaf_list[$map_count] .= url($foaf);
$user_count++;
}
# Make full sitemaps from the lists and save them.
# Possible factoring: put all the lists into a master array, thus allowing
# calling with single argument (i.e., array_to_map('user')).
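# A minimal sketch of that factoring, kept commented out here; the
# $facet_lists name is hypothetical:
#
#   $facet_lists = array(
#       'user'        => $user_list,
#       'user_rss'    => $user_rss_list,
#       'all'         => $all_list,
#       'all_rss'     => $all_rss_list,
#       'replies'     => $replies_list,
#       'replies_rss' => $replies_rss_list,
#       'foaf'        => $foaf_list,
#   );
#   foreach ($facet_lists as $prefix => $list) {
#       array_to_map($list, $prefix);
#   }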
array_to_map($user_list, 'user');
array_to_map($user_rss_list, 'user_rss');
array_to_map($all_list, 'all');
array_to_map($all_rss_list, 'all_rss');
array_to_map($replies_list, 'replies');
array_to_map($replies_rss_list, 'replies_rss');
array_to_map($foaf_list, 'foaf');
}
# Generate sitemaps of all avatars.
function avatar_map() {
global $output_paths;
$avatars = DB_DataObject::factory('avatar');
$avatars->query('SELECT url, modified FROM avatar');
$avatar_count = 0;
$map_count = 1;
while ($avatars->fetch()) {
# We only want the original size and 24px thumbnail version - skip 96px.
if (preg_match('/-96-/', $avatars->url)) {
continue;
}
# Maximum 50,000 URLs per sitemap file.
if ($avatar_count == 50000) {
$avatar_count = 0;
$map_count++;
}
$image = array(
'url' => common_avatar_display_url($avatars),
'lastmod' => common_date_w3dtf($avatars->modified),
'changefreq' => 'monthly',
'priority' => '0.2',
);
# Construct a <url> element for each avatar and add it
# to our existing list of those.
$avatar_list[$map_count] .= url($image);
}
array_to_map($avatar_list, 'avatars');
}
# ------------------------------------------------------------------------------
# XML generation functions
# ------------------------------------------------------------------------------
# Generate a <url> element.
function url($url_args) {
$url = preg_replace('/&/', '&amp;', $url_args['url']); # escape ampersands for XML
$lastmod = $url_args['lastmod'];
$changefreq = $url_args['changefreq'];
$priority = $url_args['priority'];
if (is_null($url)) {
error("url() arguments require 'url' value.");
}
$url_out = "\t\n";
$url_out .= "\t\t$url\n";
if ($changefreq) {
$url_out .= "\t\t$changefreq\n";
}
if ($lastmod) {
$url_out .= "\t\t$lastmod\n";
}
if ($priority) {
$url_out .= "\t\t$priority\n";
}
$url_out .= "\t\n";
return $url_out;
}
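# For illustration, url(array('url' => 'http://example.com/', 'changefreq' => 'daily',
# 'priority' => '1')) produces roughly:
#
#   <url>
#       <loc>http://example.com/</loc>
#       <changefreq>daily</changefreq>
#       <priority>1</priority>
#   </url>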
# Generate a <urlset> element.
function urlset($urlset_text) {
$urlset = '<?xml version="1.0" encoding="UTF-8"?>' . "\n" .
'<urlset xmlns="http://www.sitemaps.org/schemas/sitemap/0.9">' . "\n" .
$urlset_text .
'</urlset>';
return $urlset;
}
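# The wrapped result is a complete sitemap document: an XML declaration
# followed by a <urlset> element in the sitemaps.org 0.9 namespace that
# contains the concatenated <url> entries.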
# Generate a sitemap from an array containing <url> elements and write it to a file.
function array_to_map($url_list, $filename_prefix) {
global $output_paths;
if ($url_list) {
# $map_urls is a long string containing concatenated <url> elements.
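# For a prefix like 'notice' this yields notice-1.xml, notice-2.xml, and
# so on, one file per 50,000-URL chunk.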
foreach ($url_list as $map_idx => $map_urls) {
$urlset_path = $output_paths['output_dir'] . "$filename_prefix-$map_idx.xml";
write_file($urlset_path, urlset($map_urls));
}
}
}
# ------------------------------------------------------------------------------
# Internal functions
# ------------------------------------------------------------------------------
# Parse command line arguments.
function parse_args() {
$args = getopt('f:d:u:');
if (!isset($args['f']) && !isset($args['d']) && !isset($args['u'])) {
error('Mandatory arguments: -f -d