All_in_One_SEO_Pack_Robots

The All in One SEO Pack Robots class (`All_in_One_SEO_Pack_Robots`).

Defined (1)

The class is defined in the following location(s).

/modules/aioseop_robots.php  
  1. class All_in_One_SEO_Pack_Robots extends All_in_One_SEO_Pack_Module { 
  2.  
  3. function __construct() { 
  4. $this->name = __( 'Robots.txt', 'all-in-one-seo-pack' ); // Human-readable name of the plugin 
  5. $this->prefix = 'aiosp_robots_'; // option prefix 
  6. $this->file = __FILE__; // the current file 
  7. parent::__construct(); 
  8.  
  9. $help_text = Array( 
  10. 'additional' => __( 'Rule Type', 'all-in-one-seo-pack' ),  
  11. 'useragent' => __( 'User Agent', 'all-in-one-seo-pack' ),  
  12. 'path' => __( 'Directory Path', 'all-in-one-seo-pack' ),  
  13. 'robotgen' => __( 'Robots.txt editor', 'all-in-one-seo-pack' ),  
  14. ); 
  15.  
  16. $this->default_options = array( 
  17. 'usage' => Array( 
  18. 'type' => 'html',  
  19. 'label' => 'none',  
  20. 'default' => __( 'Use the rule builder below to add rules to create a new Robots.txt file. If you already have a Robots.txt file you should use the File Editor feature in All in One SEO Pack to edit it or you can delete your current Robots.txt file and start a new one with the rule builder below.', 'all-in-one-seo-pack' ),  
  21. 'save' => false,  
  22. ),  
  23. 'additional' => Array( 
  24. 'name' => __( 'Rule Type', 'all-in-one-seo-pack' ),  
  25. 'save' => false,  
  26. 'type' => 'select',  
  27. 'initial_options' => Array( 'allow' => 'Allow', 'block' => 'Block' ),  
  28. ),  
  29. 'useragent' => Array( 
  30. 'name' => __( 'User Agent', 'all-in-one-seo-pack' ),  
  31. 'save' => false,  
  32. 'type' => 'text',  
  33. ),  
  34. 'path' => Array( 
  35. 'name' => __( 'Directory Path', 'all-in-one-seo-pack' ),  
  36. 'save' => false,  
  37. 'type' => 'text',  
  38. ),  
  39. 'robotgen' => Array( 
  40. 'name' => __( 'Generate Robots.txt', 'all-in-one-seo-pack' ),  
  41. 'save' => false,  
  42. 'default' => '',  
  43. 'type' => 'textarea',  
  44. 'cols' => 57,  
  45. 'rows' => 20,  
  46. 'label' => 'none',  
  47. 'readonly' => 'readonly',  
  48. ),  
  49. 'Submit_Preview' => Array( 
  50. 'type' => 'submit',  
  51. 'class' => 'button-primary MRL',  
  52. 'name' => __( 'Add Rule', 'all-in-one-seo-pack' ) . ' »',  
  53. 'nowrap' => 1,  
  54. ),  
  55. 'Submit_Update' => Array( 
  56. 'type' => 'submit',  
  57. 'class' => 'button-primary',  
  58. 'name' => __( 'Save Robots.txt File', 'all-in-one-seo-pack' ) . ' »',  
  59. 'nowrap' => 1,  
  60. ),  
  61. 'Submit_Delete' => Array( 
  62. 'type' => 'submit',  
  63. 'class' => 'button-primary',  
  64. 'name' => __( 'Delete Robots.txt File', 'all-in-one-seo-pack' ) . ' »',  
  65. 'nowrap' => 1,  
  66. ),  
  67. 'optusage' => Array( 
  68. 'type' => 'html',  
  69. 'label' => 'none',  
  70. 'default' => __( 'Click the Optimize button below and All in One SEO Pack will analyze your Robots.txt file to make sure it complies with the standards for Robots.txt files. The results will be displayed in a table below.', 'all-in-one-seo-pack' ),  
  71. 'save' => false,  
  72. ),  
  73. 'Submit_Opt_Update' => Array( 
  74. 'type' => 'submit',  
  75. 'class' => 'button-primary',  
  76. 'name' => __( 'Update Robots.txt File', 'all-in-one-seo-pack' ) . ' »',  
  77. 'nowrap' => 1,  
  78. 'style' => 'margin-left: 20px;',  
  79. ),  
  80. 'Submit_Opt_Preview' => Array( 
  81. 'type' => 'submit',  
  82. 'class' => 'button-primary',  
  83. 'name' => __( 'Disregard Changes', 'all-in-one-seo-pack' ) . ' »',  
  84. 'nowrap' => 1,  
  85. ),  
  86. 'Submit_Optimize' => Array( 
  87. 'type' => 'submit',  
  88. 'class' => 'button-primary',  
  89. 'name' => __( 'Optimize', 'all-in-one-seo-pack' ) . ' »',  
  90. ),  
  91. ); 
  92.  
  93. if ( ! empty( $help_text ) ) { 
  94. foreach ( $help_text as $k => $v ) { 
  95. $this->default_options[ $k ]['help_text'] = $v; 
  96.  
  97. $this->locations = array( 
  98. 'generator' => Array( 
  99. 'name' => "Robots.txt",  
  100. 'type' => 'settings',  
  101. 'options' => Array( 
  102. 'usage',  
  103. 'additional',  
  104. 'useragent',  
  105. 'path',  
  106. 'Submit_Preview',  
  107. 'Submit_Update',  
  108. 'Submit_Delete',  
  109. 'robotgen',  
  110. 'optusage',  
  111. 'Submit_Opt_Update',  
  112. 'Submit_Opt_Preview',  
  113. 'Submit_Optimize',  
  114. ),  
  115. ),  
  116. ); 
  117.  
  118. $this->layout = Array( 
  119. 'default' => Array( 
  120. 'name' => __( 'Create a Robots.txt File', 'all-in-one-seo-pack' ),  
  121. 'options' => Array( 
  122. 'usage',  
  123. 'additional',  
  124. 'useragent',  
  125. 'path',  
  126. 'Submit_Preview',  
  127. 'Submit_Update',  
  128. 'Submit_Delete',  
  129. 'robotgen',  
  130. ) // this is set below, to the remaining options -- pdb 
  131. ),  
  132. ); 
  133. $this->layout['optimize'] = Array( 
  134. 'name' => __( 'Optimize your Robots.txt File', 'all-in-one-seo-pack' ),  
  135. 'options' => Array( 'optusage', 'Submit_Optimize' ),  
  136. ); 
  137. if ( isset( $_POST['Submit_Optimize'] ) ) { 
  138. $this->layout['optimize']['options'] = Array( 
  139. 'optusage',  
  140. 'Submit_Opt_Update',  
  141. 'Submit_Opt_Preview',  
  142. 'robothtml',  
  143. ); 
  144. $this->default_options['optusage']['default'] = __( "Your Robots.txt file has been optimized. Here are the results and recommendations. Click the Update Robots.txt File button below to write these changes to your Robots.txt file. Click the Disregard Changes button to ignore these recommendations and keep your current Robots.txt file.", 'all-in-one-seo-pack' ); 
  145.  
  146. // load initial options / set defaults 
  147. $this->update_options(); 
  148.  
  149. add_action( $this->prefix . 'settings_update', Array( $this, 'do_robots' ), 10, 2 ); 
  150. add_filter( $this->prefix . 'display_options', Array( $this, 'filter_options' ), 10, 2 ); 
  151. add_filter( $this->prefix . 'submit_options', Array( $this, 'filter_submit' ), 10, 2 ); 
  152. add_filter( $this->prefix . 'display_settings', Array( $this, 'filter_settings' ), 10, 2 ); 
  153.  
  154. function filter_settings( $settings, $location ) { 
  155. if ( $location == 'generator' ) { 
  156. $prefix = $this->get_prefix( $location ) . $location . '_'; 
  157. if ( isset( $_POST['Submit_Optimize'] ) ) { 
  158. if ( isset( $settings[ $prefix . 'robotgen' ] ) ) { 
  159. $settings[ $prefix . 'robotgen' ]['type'] = 'hidden'; 
  160. $settings[ $prefix . 'robotgen' ]['label'] = 'none'; 
  161. $settings[ $prefix . 'robotgen' ]['help_text'] = ''; 
  162. $settings[ $prefix . 'robothtml' ] = Array( 
  163. 'name' => __( 'Robots.txt', 'all-in-one-seo-pack' ),  
  164. 'save' => false,  
  165. 'default' => '',  
  166. 'type' => 'html',  
  167. 'label' => 'none',  
  168. 'style' => 'margin-top:10px;',  
  169. ); 
  170.  
  171. return $settings; 
  172.  
  173. function filter_submit( $submit, $location ) { 
  174. if ( $location == 'generator' ) { 
  175. unset( $submit['Submit_Default'] ); 
  176. $submit['Submit']['type'] = 'hidden'; 
  177.  
  178. return $submit; 
  179.  
  180. /** 
  181. * Returns the sitemap filename; 
  182. * @return bool 
  183. */ 
  184. function get_sitemap_filename() { 
  185.  
  186. global $aioseop_options; 
  187. if ( isset( $aioseop_options['modules']['aiosp_sitemap_options']['aiosp_sitemap_filename'] ) ) { 
  188. return $aioseop_options['modules']['aiosp_sitemap_options']['aiosp_sitemap_filename']; 
  189.  
  190. return false; 
  191.  
  192. /** 
  193. * Filters the options. 
  194. * @todo Much of this couldn't be considered filtering options, and should be extracted to other functions. 
  195. * @since ?? 
  196. * @since 2.3.6 
  197. */ 
  198. function filter_options( $options, $location ) { 
  199. if ( $location ) { 
  200. $prefix = $this->get_prefix( $location ) . $location . '_'; 
  201. if ( $location === 'generator' ) { 
  202. $optimize = false; 
  203. $robotgen = ''; 
  204. if ( ! empty( $_POST[ $prefix . 'robotgen' ] ) ) { 
  205. $robotgen = str_replace( "\r\n", "\n", $_POST[ $prefix . 'robotgen' ] ); 
  206. if ( isset( $_POST['Submit_Preview'] ) ) { 
  207. $options[ $prefix . 'robotgen' ] = $robotgen; 
  208. if ( ! isset( $_POST['Submit_Preview'] ) ) { 
  209. if ( isset( $_POST['Submit_Optimize'] ) && ! isset( $_POST['Submit_Delete'] ) && ! isset( $_POST['Submit_Update'] ) && ! isset( $_POST['Submit_Opt_Update'] ) ) { 
  210. $optimize = true; 
  211. if ( ! isset( $options[ $prefix . 'robotgen' ] ) || empty( $options[ $prefix . 'robotgen' ] ) ) { 
  212. if ( $optimize ) { 
  213. $options[ $prefix . 'robotgen' ] = $robotgen; 
  214. if ( empty( $options[ $prefix . 'robotgen' ] ) ) { 
  215. $options = $this->load_files( $options, Array( 'robotgen' => 'robots.txt' ), $prefix ); 
  216. $access = ( get_option( 'blog_public' ) ) ? 'allow' : 'block'; 
  217. if ( $access ) { 
  218. global $aioseop_options; 
  219. $sitemap_url = ''; 
  220. $sitemap_filename = $this->get_sitemap_filename(); 
  221. if ( $sitemap_filename ) { 
  222. $sitemapurl = trailingslashit( get_home_url() ) . $sitemap_filename . '.xml'; 
  223. $allow_rule = "Sitemap: $sitemapurl \n\n# global\nUser-agent: *\nDisallow: /xmlrpc.php\n\n"; 
  224. $block_rule = "# global\nUser-agent: *\nDisallow: /\n\n"; 
  225. if ( empty( $options[ $prefix . 'robotgen' ] ) ) { 
  226. $options[ $prefix . 'robotgen' ] = ''; 
  227. if ( isset( $_POST['Submit_Preview'] ) && ( ( $options[ $prefix . 'robotgen' ] == $allow_rule ) || 
  228. ( $options[ $prefix . 'robotgen' ] == $block_rule ) ) 
  229. ) { 
  230. $options[ $prefix . 'robotgen' ] = ''; 
  231. if ( $access === 'block' && empty( $options[ $prefix . 'robotgen' ] ) ) { 
  232. $options[ $prefix . 'robotgen' ] .= $block_rule; 
  233. } elseif ( $access === 'allow' && empty( $options[ $prefix . 'robotgen' ] ) ) { 
  234. $options[ $prefix . 'robotgen' ] .= $allow_rule; 
  235. foreach ( Array( 'ad' => 'additional', 'ua' => 'useragent', 'dp' => 'path' ) as $k => $v ) { 
  236. if ( isset( $_POST[ $prefix . $v ] ) ) { 
  237. $$k = $_POST[ $prefix . $v ]; 
  238. if ( ! empty( $ad ) && ! empty( $ua ) && ! empty( $dp ) ) { 
  239. if ( $ad === 'allow' ) { 
  240. $ad = "Allow: "; 
  241. } else { 
  242. $ad = "Disallow: "; 
  243. $options[ $prefix . 'robotgen' ] .= "User-agent: $ua\n$ad $dp\n\n"; 
  244. $file = explode( "\n", $options[ $prefix . 'robotgen' ] ); 
  245. if ( $optimize ) { 
  246. $rules = $this->parse_robots( $file ); 
  247. $user_agents = $this->get_robot_user_agents( $rules ); 
  248. foreach ( $user_agents as $ua => $rules ) { 
  249. $user_agents[ $ua ]['disallow'] = $this->opt_robot_rule( $rules['disallow'] ); 
  250. $user_agents[ $ua ]['allow'] = $this->opt_robot_rule( $rules['allow'] ); 
  251. $rules = $this->flatten_user_agents( $user_agents ); 
  252. unset( $user_agents ); 
  253. foreach ( $rules as $r ) { 
  254. $r['disallow'] = $this->opt_robot_rule( $r['disallow'] ); 
  255. $r['allow'] = $this->opt_robot_rule( $r['allow'] ); 
  256. $options[ $prefix . 'robotgen' ] = $this->output_robots( $rules ); 
  257. $file2 = explode( "\n", $options[ $prefix . 'robotgen' ] ); 
  258. $options[ $prefix . 'robothtml' ] = '<table width=100%><tr><td valign=top width=45%>' . $this->annotate_robots_html( $file, true, __( "Current File", 'all-in-one-seo-pack' ) ) . '</td><td><span style="font-size: xx-large">→</span></td><td valign=top>' . $this->annotate_robots_html( $file2, true, __( "Proposed Changes", 'all-in-one-seo-pack' ) ) . '</td></tr></table>'; 
  259. } else { 
  260. $options[ $prefix . 'robothtml' ] = $this->annotate_robots_html( $file, true, __( "Current File", 'all-in-one-seo-pack' ) ); 
  261.  
  262. return $options; 
  263.  
  264. function do_robots( $options, $location ) { 
  265. if ( $location ) { 
  266. $prefix = $this->get_prefix( $location ) . $location . '_'; 
  267. if ( $location === 'generator' ) { 
  268. if ( isset( $_POST['Submit_Update'] ) || isset( $_POST['Submit_Opt_Update'] ) ) { 
  269. $this->save_files( Array( 'robotgen' => 'robots.txt' ), $prefix ); 
  270. } elseif ( isset( $_POST['Submit_Delete'] ) ) { 
  271. $this->delete_files( Array( 'robotgen' => 'robots.txt' ) ); 
  272.  
  273. function annotate_robots_html( $file, $show_help = false, $title = '' ) { 
  274. $robots = $this->annotate_robots( $file ); 
  275. if ( ! empty( $robots ) ) { 
  276. $buf = '<table class="widefat" ><thead>'; 
  277. if ( ! empty( $title ) ) { 
  278. $buf .= "<tr><th colspan=3>" . $title . "</th></tr>"; 
  279. $buf .= '<tr class="aioseop_optimize_thread">'; 
  280. $buf .= '<th style="width:5%;"></th><th style="width:78%;"><span class="column_label" >Parameter</span></th>'; 
  281. $buf .= '<th><span class="" >Status</span></th></tr></thead>'; 
  282. $buf .= "<tbody>"; 
  283.  
  284. foreach ( $robots as $r ) { 
  285. $class = 'robots'; 
  286. $status = "#9cf975"; 
  287. $help = ''; 
  288. if ( ! $r['valid'] || ! $r['strict'] ) { 
  289. if ( ! $r['strict'] ) { 
  290. $class .= ' quirks'; 
  291. $status = "yellow"; 
  292. if ( ! $r['valid'] ) { 
  293. $class .= ' invalid'; 
  294. $status = "#f9534a"; 
  295. if ( $show_help ) { 
  296. $help = '<a style="cursor:pointer;" class="' . $class . '" title="Click for Help!" onclick="toggleVisibility(\'aiosp_robots_main_legend_tip\');" title="Click for Help">' 
  297. . '<div class="aioseop_tip_icon"></div></a>'; 
  298. $buf .= "<tr class='entry-row {$class}'><td>{$help}</td><td><span class='entry_label'>{$r['content']}</td><td><div style='background:{$status};'></div></td></tr>"; 
  299. $buf .= '</tbody>'; 
  300.  
  301. $buf .= '</table>'; 
  302. if ( $show_help ) { 
  303. $buf .= '<div class="aioseop_option_docs" id="aiosp_robots_main_legend_tip"> 
  304. <h3>' . __( 'Legend', 'all-in-one-seo-pack' ) . '</h3> 
  305. <ul> 
  306. <li>' . __( 'The yellow indicator means that a non-standard extension was recognized; not all crawlers may recognize it or interpret it the same way. The Allow and Sitemap directives are commonly used by Google and Yahoo.', 'all-in-one-seo-pack' ) . '</li> 
  307. <li>' . __( 'The red indicator means that the syntax is invalid for a robots.txt file.', 'all-in-one-seo-pack' ) . '</li> 
  308. </ul> 
  309. <a target="_blank" rel="nofollow" href="http://wikipedia.org/wiki/Robots_exclusion_standard#Nonstandard_extensions">' . __( 'More Information', 'all-in-one-seo-pack' ) . '</a> 
  310. </div>'; 
  311. } else { 
  312. $buf = '<p class="aioseop_error_notice" ><strong>Your Robots.txt file is either empty, cannot be found, or has invalid data.</strong></p>'; 
  313.  
  314. return $buf; 
  315.  
  316. function annotate_robots( $robots ) { 
  317. $state = 0; 
  318. $rules = Array(); 
  319. foreach ( $robots as $l ) { 
  320. $l = trim( $l ); 
  321. if ( empty( $l[0] ) ) { 
  322. if ( $state > 1 ) { 
  323. $rules[] = Array( 
  324. 'state' => 0,  
  325. 'type' => 'blank',  
  326. 'content' => $l,  
  327. 'valid' => true,  
  328. 'strict' => true,  
  329. ); 
  330. $state = 0; 
  331. } elseif ( $l[0] === '#' ) { 
  332. if ( $state < 1 ) { 
  333. $state = 1; 
  334. $rules[] = Array( 
  335. 'state' => $state,  
  336. 'type' => 'comment',  
  337. 'content' => $l,  
  338. 'valid' => true,  
  339. 'strict' => true,  
  340. ); 
  341. } elseif ( stripos( $l, 'sitemap' ) === 0 ) { 
  342. $state = 2; 
  343. $rules[] = Array( 
  344. 'state' => $state,  
  345. 'type' => 'sitemap',  
  346. 'content' => $l,  
  347. 'valid' => true,  
  348. 'strict' => false,  
  349. ); 
  350. } elseif ( stripos( $l, 'crawl-delay' ) === 0 ) { 
  351. $state = 3; 
  352. $rules[] = Array( 
  353. 'state' => $state,  
  354. 'type' => 'crawl-delay',  
  355. 'content' => $l,  
  356. 'valid' => true,  
  357. 'strict' => false,  
  358. ); 
  359. } elseif ( stripos( $l, 'user-agent' ) === 0 ) { 
  360. $state = 3; 
  361. $rules[] = Array( 
  362. 'state' => $state,  
  363. 'type' => 'user-agent',  
  364. 'content' => $l,  
  365. 'valid' => true,  
  366. 'strict' => true,  
  367. ); 
  368. } elseif ( stripos( $l, 'useragent' ) === 0 ) { 
  369. $state = 3; 
  370. $rules[] = Array( 
  371. 'state' => $state,  
  372. 'type' => 'user-agent',  
  373. 'content' => $l,  
  374. 'valid' => true,  
  375. 'strict' => false,  
  376. ); 
  377. } elseif ( stripos( $l, 'disallow' ) === 0 ) { 
  378. if ( $state < 3 ) { 
  379. $rules[] = Array( 
  380. 'state' => $state,  
  381. 'type' => 'disallow',  
  382. 'content' => $l,  
  383. 'valid' => false,  
  384. 'strict' => false,  
  385. ); 
  386. continue; 
  387. $state = 3; 
  388. $rules[] = Array( 
  389. 'state' => $state,  
  390. 'type' => 'disallow',  
  391. 'content' => $l,  
  392. 'valid' => true,  
  393. 'strict' => true,  
  394. ); 
  395. } elseif ( stripos( $l, 'allow' ) === 0 ) { 
  396. if ( $state < 3 ) { 
  397. $rules[] = Array( 
  398. 'state' => $state,  
  399. 'type' => 'allow',  
  400. 'content' => $l,  
  401. 'valid' => false,  
  402. 'strict' => false,  
  403. ); 
  404. continue; 
  405. $state = 3; 
  406. $rules[] = Array( 
  407. 'state' => $state,  
  408. 'type' => 'allow',  
  409. 'content' => $l,  
  410. 'valid' => true,  
  411. 'strict' => false,  
  412. ); 
  413. } else { 
  414. $rules[] = Array( 
  415. 'state' => $state,  
  416. 'type' => 'unknown',  
  417. 'content' => $l,  
  418. 'valid' => false,  
  419. 'strict' => false,  
  420. ); 
  421.  
  422. return $rules; 
  423.  
  424. function parse_annotated_robots( $robots ) { 
  425. $state = 0; 
  426. $rules = Array(); 
  427. $opts = Array( 'sitemap', 'crawl-delay', 'user-agent', 'allow', 'disallow', 'comment' ); 
  428. $rule = Array(); 
  429. foreach ( $opts as $o ) { 
  430. $rule[ $o ] = Array(); 
  431. $blank_rule = $rule; 
  432. foreach ( $robots as $l ) { 
  433. switch ( $l['type'] ) { 
  434. case 'blank': 
  435. if ( $state >= 1 ) { 
  436. if ( ( $state === 1 ) && ( empty( $rule['user-agent'] ) ) ) { 
  437. $rule['user-agent'] = Array( null ); 
  438. $rules[] = $rule; 
  439. $rule = $blank_rule; 
  440. continue; 
  441. case 'comment': 
  442. $rule['comment'][] = $l['content']; 
  443. continue; 
  444. case 'sitemap': 
  445. $rule['sitemap'][] = trim( substr( $l['content'], 8 ) ); 
  446. break; 
  447. case 'crawl-delay': 
  448. $rule['crawl-delay'][] = trim( substr( $l['content'], 12 ) ); 
  449. break; 
  450. case 'user-agent': 
  451. if ( $l['strict'] ) { 
  452. $ua = trim( substr( $l['content'], 11 ) ); 
  453. } else { 
  454. $ua = trim( substr( $l['content'], 10 ) ); 
  455. $rule['user-agent'][] = $ua; 
  456. break; 
  457. case 'disallow': 
  458. if ( $l['valid'] ) { 
  459. $rule['disallow'][] = trim( substr( $l['content'], 9 ) ); 
  460. break; 
  461. continue; 
  462. case 'allow': 
  463. if ( $l['valid'] ) { 
  464. $rule['allow'][] = trim( substr( $l['content'], 6 ) ); 
  465. break; 
  466. continue; 
  467. case 'unknown': 
  468. default: 
  469. $state = $l['state']; 
  470. if ( ( $state === 1 ) && ( empty( $rule['user-agent'] ) ) ) { 
  471. $rule['user-agent'] = Array( null ); 
  472. if ( $state >= 1 ) { 
  473. $rules[] = $rule; 
  474.  
  475. return $rules; 
  476.  
  477. function parse_robots( $robots ) { 
  478. return $this->parse_annotated_robots( $this->annotate_robots( $robots ) ); 
  479.  
  480. function get_robot_user_agents( $rules ) { 
  481. $opts = Array( 'sitemap', 'crawl-delay', 'user-agent', 'allow', 'disallow', 'comment' ); 
  482. $user_agents = Array(); 
  483. foreach ( $rules as $r ) { 
  484. if ( ! empty( $r['sitemap'] ) && empty( $r['user-agent'] ) ) { 
  485. $r['user-agent'] = Array( null ); 
  486. foreach ( $r['user-agent'] as $ua ) { 
  487. if ( ! isset( $user_agents[ $ua ] ) ) { 
  488. $user_agents[ $ua ] = Array(); 
  489. foreach ( $opts as $o ) { 
  490. if ( ! isset( $user_agents[ $ua ][ $o ] ) ) { 
  491. $user_agents[ $ua ][ $o ] = $r[ $o ]; 
  492. } else { 
  493. $user_agents[ $ua ][ $o ] = array_merge( $user_agents[ $ua ][ $o ], $r[ $o ] ); 
  494.  
  495. return $user_agents; 
  496.  
  497. function flatten_user_agents( $user_agents ) { 
  498. $rules = Array(); 
  499. foreach ( $user_agents as $ua => $r ) { 
  500. $r['user-agent'] = Array( $ua ); 
  501. $rules[] = $r; 
  502.  
  503. return $rules; 
  504.  
  505. function opt_robot_rule( $dis ) { 
  506. if ( is_array( $dis ) ) { // unique rules only 
  507. $dis = array_unique( $dis, SORT_STRING ); 
  508. $pd = null; 
  509. foreach ( $dis as $k => $d ) { 
  510. $d = trim( $d ); 
  511. if ( ! empty( $pd ) && ! empty( $d ) ) { 
  512. if ( strpos( $d, $pd ) === 0 ) { 
  513. unset( $dis[ $k ] ); 
  514. continue; // get rid of subpaths of $pd 
  515. $l = strlen( $d ); 
  516. if ( ( $l > 0 ) && ( $d[ $l - 1 ] !== '/' ) ) { 
  517. continue; 
  518. $pd = $d; // only allow directory paths for $pd 
  519.  
  520. return $dis; 
  521.  
  522. function output_robots( $rules ) { 
  523. $robots = ''; 
  524. foreach ( $rules as $r ) { 
  525. foreach ( $r['comment'] as $c ) { 
  526. $robots .= "$c\n"; 
  527. foreach ( $r['user-agent'] as $u ) { 
  528. if ( $u != '' ) { 
  529. $robots .= "User-agent: $u\n"; 
  530. foreach ( $r['crawl-delay'] as $c ) { 
  531. $robots .= "Crawl-Delay: $c\n"; 
  532. foreach ( $r['allow'] as $a ) { 
  533. $robots .= "Allow: $a\n"; 
  534. foreach ( $r['disallow'] as $d ) { 
  535. $robots .= "Disallow: $d\n"; 
  536. foreach ( $r['sitemap'] as $s ) { 
  537. $robots .= "Sitemap: $s\n"; 
  538. $robots .= "\n"; 
  539.  
  540. return $robots;