Skip to content

Commit

Permalink
Merge branch 'release/1.1.0'
Browse files Browse the repository at this point in the history
  • Loading branch information
cliffparnitzky committed Sep 29, 2016
2 parents 3b9be7d + e2b1fbe commit c153d7a
Show file tree
Hide file tree
Showing 6 changed files with 112 additions and 80 deletions.
9 changes: 8 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,9 +1,16 @@
Contao Extension "hofff/contao-robots-txt-editor"
-------------------------------------------------

### Version 1.1.0 (2016-09-29) ###
- Adds multilingual support (see #3)
- Improves creation and cleanup of files

### Version 1.0.0 (2016-08-31) ###
- Initial release

### Version 1.0.0-beta2 (2016-08-31) ###
- Adds Multi-Site-Installation (see #1)
- Improves extending `sitemap` section in BE (see #2)

### Version 1.0.0-beta1 (2016-06-02) ###
- Initial beta
Original file line number Diff line number Diff line change
Expand Up @@ -56,59 +56,60 @@ public function importRobotsTxt(\DataContainer $dc)
}

/**
 * Generate the robots.txt files.
 *
 * First removes every existing domain specific robots.txt file, then writes
 * one robots.txt per published fallback root page (domain specific if the
 * page requests it, otherwise the global file in the Contao root).
 *
 * @return bool true if every file could be written, false if at least one write failed
 */
public static function generateRobotsTxts()
{
	// delete all existing domain specific robots.txt files
	$strFolderPath = static::getDomainSpecificFolderPath(true);
	foreach (scandir($strFolderPath) as $entry)
	{
		// scandir() returns bare names, so the directory check must use the full path
		if (!is_dir($strFolderPath . "/" . $entry) &&
			($pos = strpos($entry, FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_PREFIX)) !== FALSE && $pos == 0 &&
			($pos = strpos($entry, FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_SUFFIX)) !== FALSE && $pos == (strlen($entry) - strlen(FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_SUFFIX)))
		{
			$filePathOld = $strFolderPath . "/" . $entry;

			if (file_exists($filePathOld))
			{
				unlink($filePathOld);
			}
		}
	}

	// create all robots.txt files
	$blnGenerationSuccess = true;

	$objFallbackRootPage = static::getFallbackRootPages();
	while ($objFallbackRootPage->next())
	{
		$filePath = TL_ROOT . "/" . FILE_ROBOTS_TXT;

		if (static::isDomainSpecicCreationAllowed($objFallbackRootPage->useDomainSpecificRobotsTxt))
		{
			$filePath = static::getDomainSpecificFilePath($objFallbackRootPage->alias, true);
		}

		$fileContent = $objFallbackRootPage->robotsTxtContent;

		if ($objFallbackRootPage->createSitemap && $objFallbackRootPage->sitemapName != '' && $objFallbackRootPage->robotsTxtAddAbsoluteSitemapPath)
		{
			$strDomain = ($objFallbackRootPage->useSSL ? 'https://' : 'http://') . ($objFallbackRootPage->dns ?: \Environment::get('host')) . TL_PATH . '/';

			$fileContent .= "\n";

			// one sitemap entry per root page sharing this dns (multilingual support)
			$objRootPage = static::getRootPagesByDns($objFallbackRootPage->dns);
			while ($objRootPage->next())
			{
				$fileContent .= "Sitemap: " . $strDomain . "share/" . $objRootPage->sitemapName . ".xml\n";
			}
		}

		if (file_put_contents($filePath, $fileContent) === FALSE)
		{
			// keep going so the remaining files are still written; report failure at the end
			$blnGenerationSuccess = false;
		}
	}

	return $blnGenerationSuccess;
}

/**
Expand All @@ -133,8 +134,39 @@ public static function isDomainSpecicCreationAllowed ($blnUseDomainSpecificRobot
/**
 * Returns the path to the folder holding the domain specific robots.txt files.
 *
 * @param bool $blnFullPath whether to prepend the absolute server path (TL_ROOT)
 * @return string the relative or absolute folder path
 */
public static function getDomainSpecificFolderPath ($blnFullPath = false)
{
	// NOTE(review): constant name uses mixed case ("..._Folder") — confirm it matches its definition
	$domainSpecificFolderPath = FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_Folder;
	if ($blnFullPath)
	{
		$domainSpecificFolderPath = TL_ROOT . "/" . $domainSpecificFolderPath;
	}
	return $domainSpecificFolderPath;
}

/**
 * Builds the path to the domain specific robots.txt file of a page alias.
 *
 * @param string $strAlias    the page alias embedded in the file name
 * @param bool   $blnFullPath whether to prepend the absolute server path
 * @return string the file path
 */
public static function getDomainSpecificFilePath ($strAlias, $blnFullPath = false)
{
	$strFileName = FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_PREFIX . $strAlias . FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_SUFFIX;

	return static::getDomainSpecificFolderPath($blnFullPath) . "/" . $strFileName;
}

/**
 * Returns all published fallback root pages.
 *
 * @return \Database\Result the result set of matching tl_page rows
 */
public static function getFallbackRootPages()
{
	// the query has no placeholder, so execute() must not be fed the (undefined) $strDns
	return \Database::getInstance()->prepare("SELECT * FROM tl_page WHERE published = 1 AND fallback = 1")
		->execute();
}

/**
 * Returns the published root pages for a dns, fallback page first.
 *
 * @param string $strDns the dns (domain) to filter by
 * @return \Database\Result the result set of matching tl_page rows
 */
public static function getRootPagesByDns($strDns)
{
	return \Database::getInstance()->prepare("SELECT * FROM tl_page WHERE published = 1 AND dns = ? ORDER BY fallback DESC, sorting")
		->execute($strDns);
}
}
Original file line number Diff line number Diff line change
Expand Up @@ -44,7 +44,7 @@ public static function getSubscribedEvents()
*/
public function generateRewrites(\Bit3\Contao\Htaccess\Event\GenerateRewritesEvent $event)
{
$objPages = \Contao\Database::getInstance()->prepare("SELECT alias, dns FROM tl_page WHERE createRobotsTxt = 1 AND useDomainSpecificRobotsTxt = 1 AND published = 1")->execute();
$objPages = \Contao\Database::getInstance()->prepare("SELECT alias, dns FROM tl_page WHERE createRobotsTxt = 1 AND useDomainSpecificRobotsTxt = 1 AND published = 1 AND fallback = 1")->execute();

while ($objPages->next())
{
Expand Down
65 changes: 31 additions & 34 deletions CT_ROOT/system/modules/hofff_robots-txt-editor/dca/tl_page.php
Original file line number Diff line number Diff line change
Expand Up @@ -2,21 +2,9 @@

$GLOBALS['TL_DCA']['tl_page']['config']['onload_callback'][] = array('tl_page_hofff_robots_txt_editor', 'modifyPaletteAndFields');

$arrLegends = explode(";", $GLOBALS['TL_DCA']['tl_page']['palettes']['root']);
$legendKeyToInsert = 0;
foreach($arrLegends as $legendKey=>$legend)
{
if (strpos($legend, "{sitemap") === 0)
{
$legendKeyToInsert = $legendKey;
break;
}
}
array_splice($arrLegends, $legendKeyToInsert, 0, "{robotstxt_legend:hide},createRobotsTxt");
$GLOBALS['TL_DCA']['tl_page']['palettes']['root'] = implode(";", $arrLegends);
$GLOBALS['TL_DCA']['tl_page']['palettes']['__selector__'][] = "createRobotsTxt";
$GLOBALS['TL_DCA']['tl_page']['fields']['dns']['eval']['mandatory'] = true;

$GLOBALS['TL_DCA']['tl_page']['subpalettes']['createRobotsTxt'] = "robotsTxtContent,useDomainSpecificRobotsTxt";
$GLOBALS['TL_DCA']['tl_page']['fields']['fallback']['eval']['submitOnChange'] = true;

$GLOBALS['TL_DCA']['tl_page']['fields']['createRobotsTxt'] = array
(
Expand Down Expand Up @@ -85,44 +73,53 @@ public function __construct()
/**
 * Adjusts the tl_page DCA: only fallback root pages get the robots.txt
 * settings injected into the root palette.
 *
 * @param \DataContainer $dc the data container of the edited page
 */
public function modifyPaletteAndFields($dc)
{
	$objPage = \Database::getInstance()->prepare("SELECT * FROM tl_page WHERE id = ?")->execute($dc->id);
	if ($objPage->next() && $objPage->fallback)
	{
		// insert the robots.txt legend directly before the sitemap legend
		$arrLegends = explode(";", $GLOBALS['TL_DCA']['tl_page']['palettes']['root']);
		$legendKeyToInsert = 0;
		foreach($arrLegends as $legendKey=>$legend)
		{
			if (strpos($legend, "{sitemap") === 0)
			{
				$legendKeyToInsert = $legendKey;
				break;
			}
		}
		array_splice($arrLegends, $legendKeyToInsert, 0, "{robotstxt_legend:hide},createRobotsTxt");
		$GLOBALS['TL_DCA']['tl_page']['palettes']['root'] = implode(";", $arrLegends);
		$GLOBALS['TL_DCA']['tl_page']['palettes']['__selector__'][] = "createRobotsTxt";

		$GLOBALS['TL_DCA']['tl_page']['subpalettes']['createRobotsTxt'] = "robotsTxtContent,useDomainSpecificRobotsTxt";

		if ($objPage->createRobotsTxt)
		{
			$GLOBALS['TL_DCA']['tl_page']['subpalettes']['createSitemap'] = $GLOBALS['TL_DCA']['tl_page']['subpalettes']['createSitemap'] . ',robotsTxtAddAbsoluteSitemapPath';
		}

		// dns becomes mandatory as soon as a domain specific robots.txt is requested
		$GLOBALS['TL_DCA']['tl_page']['fields']['dns']['eval']['mandatory'] = $objPage->useDomainSpecificRobotsTxt;
	}
}


/**
 * Update the robots.txt files when the page was stored.
 *
 * Regenerates all robots.txt files and reports the outcome as a
 * back end confirmation or error message.
 *
 * @param DataContainer $dc the data container of the stored page
 */
public function updateRobotsTxt(DataContainer $dc)
{
	if (Hofff\Contao\RobotsTxtEditor\RobotsTxtEditor::generateRobotsTxts())
	{
		\Message::addConfirmation($GLOBALS['TL_LANG']['MSC']['robotstxt_updated']);
	}
	else
	{
		\Message::addError($GLOBALS['TL_LANG']['ERR']['robotstxt_not_updated']);
	}
}

/**
 * Update the .htaccess when the page was stored.
 *
 * Unconditionally rewrites the .htaccess so removed domain specific
 * robots.txt rewrites are cleaned up as well.
 *
 * @param DataContainer $dc the data container of the stored page
 */
public function updateHtaccess(DataContainer $dc)
{
	$objHtaccess = Bit3\Contao\Htaccess\Htaccess::getInstance();
	$objHtaccess->update();
}

/**
Expand Down
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
<?php

// German back end language strings for the robots.txt editor
$GLOBALS['TL_LANG']['ERR']['no_robotstxt_default'] = "Es befindet sich keine <b>" . FILE_ROBOTS_TXT_DEFAULT . "</b> in ihrem Contao Root.<br/>Diese Datei hätte bei der Installation der Erweiterung initial erstellt werden sollen.<br/><br/>Der Import wurde abgebrochen.<br/><br/>Bitte installieren Sie die Erweiterung erneut.";
$GLOBALS['TL_LANG']['ERR']['robotstxt_not_updated'] = "Die " . FILE_ROBOTS_TXT . " Dateien konnten nicht neu erstellt werden (ggf. ist der Zugriff auf mind. eine Datei gesperrt).";

$GLOBALS['TL_LANG']['MSC']['robotstxt_updated'] = "Die " . FILE_ROBOTS_TXT . " Dateien wurden neu erstellt.";
Original file line number Diff line number Diff line change
@@ -1,8 +1,6 @@
<?php

// English back end language strings for the robots.txt editor
$GLOBALS['TL_LANG']['ERR']['no_robotstxt_default'] = "There is no <b>" . FILE_ROBOTS_TXT_DEFAULT . "</b> in your Contao root.<br/>This file should be created initially when you installed the extension.<br/><br/>The import was aborted.<br/><br/>Please install the extension again.";
$GLOBALS['TL_LANG']['ERR']['robotstxt_not_updated'] = "The " . FILE_ROBOTS_TXT . " files have not been recreated (possibly, the access to at least one file is locked).";

$GLOBALS['TL_LANG']['MSC']['robotstxt_updated'] = "The " . FILE_ROBOTS_TXT . " files have been recreated.";

0 comments on commit c153d7a

Please sign in to comment.