diff --git a/CHANGELOG.md b/CHANGELOG.md
index e92cae3..aa74d13 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,9 +1,16 @@
Contao Extension "hofff/contao-robots-txt-editor"
-------------------------------------------------
+### Version 1.1.0 (2016-09-29) ###
+- Adds multilingual support (see #3)
+- Improves creation and cleanup of the robots.txt files
+
+### Version 1.0.0 (2016-08-31) ###
+- Initial release
+
### Version 1.0.0-beta2 (2016-08-31) ###
- Adds Multi-Site-Installation (see #1)
- Improves extending `sitemap` section in BE (see #2)
### Version 1.0.0-beta1 (2016-06-02) ###
-- Initial release
\ No newline at end of file
+- Initial beta
\ No newline at end of file
diff --git a/CT_ROOT/system/modules/hofff_robots-txt-editor/classes/RobotsTxtEditor.php b/CT_ROOT/system/modules/hofff_robots-txt-editor/classes/RobotsTxtEditor.php
index 887c730..228be7a 100644
--- a/CT_ROOT/system/modules/hofff_robots-txt-editor/classes/RobotsTxtEditor.php
+++ b/CT_ROOT/system/modules/hofff_robots-txt-editor/classes/RobotsTxtEditor.php
@@ -56,59 +56,60 @@ public function importRobotsTxt(\DataContainer $dc)
}
/**
- * Create the robots.txt
- * @param \DataContainer
+ * Generate the robots.txt files
*/
- public function createRobotsTxt(\DataContainer $dc)
+ public static function generateRobotsTxts()
{
- $filePath = TL_ROOT . "/" . FILE_ROBOTS_TXT;
-
- $objPage = $dc->activeRecord;
-
- if ($objPage != null)
+ // delete all existing domain specific robots.txt files
+ foreach (scandir(static::getDomainSpecificFolderPath(true)) as $entry)
{
- if (static::isDomainSpecicCreationAllowed($dc->activeRecord->useDomainSpecificRobotsTxt))
+ if (!is_dir(static::getDomainSpecificFolderPath(true) . "/" . $entry) &&
+ ($pos = strpos($entry, FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_PREFIX)) !== FALSE && $pos == 0 &&
+ ($pos = strpos($entry, FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_SUFFIX)) !== FALSE && $pos == (strlen($entry) - strlen(FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_SUFFIX)))
{
- $filePath = TL_ROOT . "/" . static::getDomainSpecificFilePath($dc->activeRecord->alias);
-
- // delete the old file, if the alias was changed
- $objOldPage = \Contao\Database::getInstance()->prepare("SELECT * FROM tl_version WHERE fromTable=? AND pid=? ORDER BY version DESC")
- ->limit(1)
- ->execute('tl_page', $dc->id);
-
- if ($objOldPage != null && ($strAliasOld = deserialize($objOldPage->data)['alias']) && $strAliasOld!= $objPage->alias)
- {
- \Message::addInfo($GLOBALS['TL_LANG']['MSC']['DomainSpecificRobotsTxt_cleared']);
- $filePathOld = TL_ROOT . "/" . static::getDomainSpecificFilePath($strAliasOld);
+ $filePathOld = static::getDomainSpecificFolderPath(true) . "/" . $entry;
- if (file_exists($filePathOld))
- {
- unlink($filePathOld);
- }
+ if (file_exists($filePathOld))
+ {
+ unlink($filePathOld);
}
}
+ }
+
+ // create all robots.txt files
+ $blnGenerationSuccess = true;
+
+ $objFallbackRootPage = static::getFallbackRootPages();
+ while ($objFallbackRootPage->next())
+ {
+ $filePath = TL_ROOT . "/" . FILE_ROBOTS_TXT;
+
+ if (static::isDomainSpecicCreationAllowed($objFallbackRootPage->useDomainSpecificRobotsTxt))
+ {
+ $filePath = static::getDomainSpecificFilePath($objFallbackRootPage->alias, true);
+ }
- $fileContent = $objPage->robotsTxtContent;
+ $fileContent = $objFallbackRootPage->robotsTxtContent;
- if ($objPage->createSitemap && $objPage->sitemapName != '' && $objPage->robotsTxtAddAbsoluteSitemapPath)
+ if ($objFallbackRootPage->createSitemap && $objFallbackRootPage->sitemapName != '' && $objFallbackRootPage->robotsTxtAddAbsoluteSitemapPath)
{
- $strDomain = ($objPage->useSSL ? 'https://' : 'http://') . ($objPage->dns ?: \Environment::get('host')) . TL_PATH . '/';
+ $strDomain = ($objFallbackRootPage->useSSL ? 'https://' : 'http://') . ($objFallbackRootPage->dns ?: \Environment::get('host')) . TL_PATH . '/';
$fileContent .= "\n";
- $fileContent .= "Sitemap: " . $strDomain . "share/" . $objPage->sitemapName . ".xml";
+ $objRootPage = static::getRootPagesByDns($objFallbackRootPage->dns);
+ while ($objRootPage->next())
+ {
+ $fileContent .= "Sitemap: " . $strDomain . "share/" . $objRootPage->sitemapName . ".xml\n";
+ }
}
if (file_put_contents($filePath, $fileContent) === FALSE)
{
- return false;
- }
- else
- {
- return true;
+ $blnGenerationSuccess = false;
}
}
- return false;
+ return $blnGenerationSuccess;
}
/**
@@ -133,8 +134,39 @@ public static function isDomainSpecicCreationAllowed ($blnUseDomainSpecificRobot
/**
- * Returns the file path to the domain specific robots.txt file.
+ * Returns the folder path of the domain specific robots.txt files.
*/
- public static function getDomainSpecificFilePath ($strAlias)
+ public static function getDomainSpecificFolderPath ($blnFullPath = false)
+ {
+ $domainSpecificFolderPath = FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_Folder;
+ if ($blnFullPath)
+ {
+ $domainSpecificFolderPath = TL_ROOT . "/" . $domainSpecificFolderPath;
+ }
+ return $domainSpecificFolderPath;
+ }
+
+ /**
+ * Returns the file path to the domain specific robots.txt file.
+ */
+ public static function getDomainSpecificFilePath ($strAlias, $blnFullPath = false)
+ {
+ return static::getDomainSpecificFolderPath($blnFullPath) . "/" . FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_PREFIX . $strAlias . FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_SUFFIX;
+ }
+
+ /**
+ * Returns all published fallback root pages.
+ */
+ public static function getFallbackRootPages()
+ {
+ return \Database::getInstance()->prepare("SELECT * FROM tl_page WHERE published = 1 AND fallback = 1")
+ ->execute();
+ }
+
+ /**
+ * Returns the root pages for a dns.
+ */
+ public static function getRootPagesByDns($strDns)
{
- return FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_Folder . "/" . FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_PREFIX . $strAlias . FILE_ROBOTS_TXT_DOMAIN_SPECIFIC_SUFFIX;;
+ return \Database::getInstance()->prepare("SELECT * FROM tl_page WHERE published = 1 AND dns = ? ORDER BY fallback DESC, sorting")
+ ->execute($strDns);
}
}
\ No newline at end of file
diff --git a/CT_ROOT/system/modules/hofff_robots-txt-editor/classes/RobotsTxtEditorHtaccessWriter.php b/CT_ROOT/system/modules/hofff_robots-txt-editor/classes/RobotsTxtEditorHtaccessWriter.php
index 5ecb922..c4d13b7 100644
--- a/CT_ROOT/system/modules/hofff_robots-txt-editor/classes/RobotsTxtEditorHtaccessWriter.php
+++ b/CT_ROOT/system/modules/hofff_robots-txt-editor/classes/RobotsTxtEditorHtaccessWriter.php
@@ -44,7 +44,7 @@ public static function getSubscribedEvents()
*/
public function generateRewrites(\Bit3\Contao\Htaccess\Event\GenerateRewritesEvent $event)
{
- $objPages = \Contao\Database::getInstance()->prepare("SELECT alias, dns FROM tl_page WHERE createRobotsTxt = 1 AND useDomainSpecificRobotsTxt = 1 AND published = 1")->execute();
+ $objPages = \Contao\Database::getInstance()->prepare("SELECT alias, dns FROM tl_page WHERE createRobotsTxt = 1 AND useDomainSpecificRobotsTxt = 1 AND published = 1 AND fallback = 1")->execute();
while ($objPages->next())
{
diff --git a/CT_ROOT/system/modules/hofff_robots-txt-editor/dca/tl_page.php b/CT_ROOT/system/modules/hofff_robots-txt-editor/dca/tl_page.php
index 83bbaed..b5dff49 100644
--- a/CT_ROOT/system/modules/hofff_robots-txt-editor/dca/tl_page.php
+++ b/CT_ROOT/system/modules/hofff_robots-txt-editor/dca/tl_page.php
@@ -2,21 +2,9 @@
$GLOBALS['TL_DCA']['tl_page']['config']['onload_callback'][] = array('tl_page_hofff_robots_txt_editor', 'modifyPaletteAndFields');
-$arrLegends = explode(";", $GLOBALS['TL_DCA']['tl_page']['palettes']['root']);
-$legendKeyToInsert = 0;
-foreach($arrLegends as $legendKey=>$legend)
-{
- if (strpos($legend, "{sitemap") === 0)
- {
- $legendKeyToInsert = $legendKey;
- break;
- }
-}
-array_splice($arrLegends, $legendKeyToInsert, 0, "{robotstxt_legend:hide},createRobotsTxt");
-$GLOBALS['TL_DCA']['tl_page']['palettes']['root'] = implode(";", $arrLegends);
-$GLOBALS['TL_DCA']['tl_page']['palettes']['__selector__'][] = "createRobotsTxt";
+$GLOBALS['TL_DCA']['tl_page']['fields']['dns']['eval']['mandatory'] = true;
-$GLOBALS['TL_DCA']['tl_page']['subpalettes']['createRobotsTxt'] = "robotsTxtContent,useDomainSpecificRobotsTxt";
+$GLOBALS['TL_DCA']['tl_page']['fields']['fallback']['eval']['submitOnChange'] = true;
$GLOBALS['TL_DCA']['tl_page']['fields']['createRobotsTxt'] = array
(
@@ -85,44 +73,53 @@ public function __construct()
public function modifyPaletteAndFields($dc)
{
$objPage = \Database::getInstance()->prepare("SELECT * FROM tl_page WHERE id = ?")->execute($dc->id);
- if ($objPage->next())
+ if ($objPage->next() && $objPage->fallback)
{
+ $arrLegends = explode(";", $GLOBALS['TL_DCA']['tl_page']['palettes']['root']);
+ $legendKeyToInsert = 0;
+ foreach($arrLegends as $legendKey=>$legend)
+ {
+ if (strpos($legend, "{sitemap") === 0)
+ {
+ $legendKeyToInsert = $legendKey;
+ break;
+ }
+ }
+ array_splice($arrLegends, $legendKeyToInsert, 0, "{robotstxt_legend:hide},createRobotsTxt");
+ $GLOBALS['TL_DCA']['tl_page']['palettes']['root'] = implode(";", $arrLegends);
+ $GLOBALS['TL_DCA']['tl_page']['palettes']['__selector__'][] = "createRobotsTxt";
+
+ $GLOBALS['TL_DCA']['tl_page']['subpalettes']['createRobotsTxt'] = "robotsTxtContent,useDomainSpecificRobotsTxt";
+
if ($objPage->createRobotsTxt)
{
$GLOBALS['TL_DCA']['tl_page']['subpalettes']['createSitemap'] = $GLOBALS['TL_DCA']['tl_page']['subpalettes']['createSitemap'] . ',robotsTxtAddAbsoluteSitemapPath';
}
-
- $GLOBALS['TL_DCA']['tl_page']['fields']['dns']['eval']['mandatory'] = $objPage->useDomainSpecificRobotsTxt;
}
}
-
/**
- * Update the robots.txt when saving the page.
+ * Update the robots.txt files after the page has been stored.
*/
public function updateRobotsTxt(DataContainer $dc)
{
- if ($dc->activeRecord->createRobotsTxt)
+ if (Hofff\Contao\RobotsTxtEditor\RobotsTxtEditor::generateRobotsTxts())
{
- $robotsTxtEditor = new Hofff\Contao\RobotsTxtEditor\RobotsTxtEditor();
- if ($robotsTxtEditor->createRobotsTxt($dc))
- {
- \Message::addConfirmation($GLOBALS['TL_LANG']['MSC']['robotstxt_updated']);
- }
- else
- {
- \Message::addError($GLOBALS['TL_LANG']['ERR']['robotstxt_not_updated']);
- }
+ \Message::addConfirmation($GLOBALS['TL_LANG']['MSC']['robotstxt_updated']);
+ }
+ else
+ {
+ \Message::addError($GLOBALS['TL_LANG']['ERR']['robotstxt_not_updated']);
}
}
+ /**
+ * Update the .htaccess after the page has been stored.
+ */
public function updateHtaccess(DataContainer $dc)
{
- if (Hofff\Contao\RobotsTxtEditor\RobotsTxtEditor::isDomainSpecicCreationAllowed($dc->activeRecord->useDomainSpecificRobotsTxt))
- {
- $objHtaccess = Bit3\Contao\Htaccess\Htaccess::getInstance();
- $objHtaccess->update();
- }
+ $objHtaccess = Bit3\Contao\Htaccess\Htaccess::getInstance();
+ $objHtaccess->update();
}
/**
diff --git a/CT_ROOT/system/modules/hofff_robots-txt-editor/languages/de/default.php b/CT_ROOT/system/modules/hofff_robots-txt-editor/languages/de/default.php
index e48a07f..7ed35c3 100644
--- a/CT_ROOT/system/modules/hofff_robots-txt-editor/languages/de/default.php
+++ b/CT_ROOT/system/modules/hofff_robots-txt-editor/languages/de/default.php
@@ -1,8 +1,6 @@
" . FILE_ROBOTS_TXT_DEFAULT . " in ihrem Contao Root.
Diese Datei hätte bei der Installation der Erweiterung initial erstellt werden sollen.
Der Import wurde abgebrochen.
Bitte installieren Sie die Erweiterung erneut.";
-$GLOBALS['TL_LANG']['ERR']['robotstxt_not_updated'] = "Die " . FILE_ROBOTS_TXT . " Datei konnte nicht neu erstellt werden (ggf. ist der Zugriff auf die Datei gesperrt).";
+$GLOBALS['TL_LANG']['ERR']['robotstxt_not_updated'] = "Die " . FILE_ROBOTS_TXT . " Dateien konnten nicht neu erstellt werden (ggf. ist der Zugriff auf mind. eine Datei gesperrt).";
-$GLOBALS['TL_LANG']['MSC']['robotstxt_updated'] = "Die " . FILE_ROBOTS_TXT . " Datei wurde neu erstellt.";
-
-$GLOBALS['TL_LANG']['MSC']['DomainSpecificRobotsTxt_cleared'] = "Der Seitenalias wurde geändert, deshalb wurde die alte domainspezifische robots.txt gelöscht.";
\ No newline at end of file
+$GLOBALS['TL_LANG']['MSC']['robotstxt_updated'] = "Die " . FILE_ROBOTS_TXT . " Dateien wurden neu erstellt.";
\ No newline at end of file
diff --git a/CT_ROOT/system/modules/hofff_robots-txt-editor/languages/en/default.php b/CT_ROOT/system/modules/hofff_robots-txt-editor/languages/en/default.php
index eba4db8..99bcd2c 100644
--- a/CT_ROOT/system/modules/hofff_robots-txt-editor/languages/en/default.php
+++ b/CT_ROOT/system/modules/hofff_robots-txt-editor/languages/en/default.php
@@ -1,8 +1,6 @@
" . FILE_ROBOTS_TXT_DEFAULT . " in your Contao root.
This file should be created initially when you installed the extension.
The import was aborted.
Please install the extension again.";
-$GLOBALS['TL_LANG']['ERR']['robotstxt_not_updated'] = "The " . FILE_ROBOTS_TXT . " file has not been recreated (possibly, the access to the file is locked).";
+$GLOBALS['TL_LANG']['ERR']['robotstxt_not_updated'] = "The " . FILE_ROBOTS_TXT . " files could not be recreated (access to at least one file may be locked).";
-$GLOBALS['TL_LANG']['MSC']['robotstxt_updated'] = "The " . FILE_ROBOTS_TXT . " file has been recreated.";
-
-$GLOBALS['TL_LANG']['MSC']['DomainSpecificRobotsTxt_cleared'] = "The page alias has been changed, so the old domain specific robots.txt was deleted.";
\ No newline at end of file
+$GLOBALS['TL_LANG']['MSC']['robotstxt_updated'] = "The " . FILE_ROBOTS_TXT . " files have been recreated.";
\ No newline at end of file