#! /usr/perl5/bin/perl
#
# CDDL HEADER START
#
# The contents of this file are subject to the terms of the
# Common Development and Distribution License (the "License").
# You may not use this file except in compliance with the License.
#
# You can obtain a copy of the license at usr/src/OPENSOLARIS.LICENSE
# or http://www.opensolaris.org/os/licensing.
# See the License for the specific language governing permissions
# and limitations under the License.
#
# When distributing Covered Code, include this CDDL HEADER in each
# file and include the License file at usr/src/OPENSOLARIS.LICENSE.
# If applicable, add the following below this CDDL HEADER, with the
# fields enclosed by brackets "[]" replaced with your own identifying
# information: Portions Copyright [yyyy] [name of copyright owner]
#
# CDDL HEADER END
#

#
# Copyright 2009 Sun Microsystems, Inc.  All rights reserved.
# Use is subject to license terms.
#

#
# Generate README.opensolaris from a template for inclusion in a
# delivery wad.
# Usage: mkreadme_osol README.opensolaris < template
#
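# Template lines are copied through verbatim.  An include directive of
# the form below (the URL is only an example) is replaced with the
# content downloaded from the named URL, trimmed to the text between
# the "[begin README tag]" and "[end README tag]" markers:
#
#	<!-- #include http://www.opensolaris.org/os/about/ -->
#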

use strict;
use warnings;

#
# Timeout and retry settings for wget.  Allow one retry for the
# occasional glitch, but don't wait longer than 5 minutes (so as not
# to hold up the build).
#
my $timeout = 150;
my $tries = 2;

#
# Markers in the web pages that we download.  Only the text between
# these markers is copied into the README.
#
my $begin_data = qr/\[begin README tag - do not delete\]/;
my $end_data = qr/\[end README tag - do not delete\]/;

my $readme_fn = shift || die "missing README filepath\n";
open(README_OUT, '>', $readme_fn) || die "couldn't open $readme_fn: $!\n";
my @lines = <STDIN>;

# Map each included URL to the downloaded (and cleaned-up) page content.
my %content;

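#
# The downloads below go through wget, which picks up its proxy
# settings from the environment.  Normalize the HTTP_PROXY/http_proxy
# variables so that both spellings carry the same value, and fall back
# to the default internal web cache if neither one is set.
#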
if (! $ENV{"HTTP_PROXY"}) {
        if ($ENV{"http_proxy"}) {
                $ENV{"HTTP_PROXY"} = $ENV{"http_proxy"};
        } else {
                $ENV{"HTTP_PROXY"} = "http://webcache.sfbay:8080";
        }
}
if (! $ENV{"http_proxy"}) {
        $ENV{"http_proxy"} = $ENV{"HTTP_PROXY"};
}

#
# Make a pass through the input file and download any web pages that
# are included by reference.
#
foreach (@lines) {
        chomp;
        if (/^<!-- #include (.+) -->$/) {
                my $url = $1;
                print "Getting $url\n";
                #
                # Download the page into $content{$url}.  Quote the URL
                # so that shell metacharacters (e.g. "&" in a query
                # string) don't confuse the command.
                #
                $content{$url} =
                    `/usr/sfw/bin/wget -q -O - -T $timeout -t $tries '$url'`;
                if (! $content{$url}) {
                        die "$url: download failed or returned no content.\n";
                }
                #
                # Clean up the downloaded contents: remove carriage
                # returns, strip out content that is outside the
                # delimiter tags, convert HTML-encoded characters back
                # into plain text.
                #
                $content{$url} =~ s/\r//g;
                my @c = split /\n/, $content{$url};
                my $l;
                # Work forwards to find start.
                while (defined ($l = shift @c)) {
                        if ($l =~ /$begin_data/) {
                                last;
                        }
                }
                if (! defined $l) {
                        print "Warning: content start delimiter not found\n";
                } else {
                        # Work backwards to find end.
                        while (defined ($l = pop @c)) {
                                if ($l =~ /$end_data/) {
                                        last;
                                }
                        }
                        if (! defined $l) {
                                print "Warning: content end delimiter ",
                                    "not found\n";
                        }
                }
                $content{$url} = join "\n", @c;
                #
                # Decode the angle-bracket entities first and "&amp;"
                # last, so that a literal sequence such as "&amp;lt;"
                # in the page isn't decoded twice.
                #
                $content{$url} =~ s/&lt;/</g;
                $content{$url} =~ s/&#60;/</g;
                $content{$url} =~ s/&gt;/>/g;
                $content{$url} =~ s/&#62;/>/g;
                $content{$url} =~ s/&amp;/&/g;
        }
}

#
# Make a second pass through the input file.  Pass most text on
# verbatim; replace #include directives with the content that was
# downloaded by the previous pass.
#
foreach (@lines) {
        if (/^<!-- #include (.+) -->$/ && exists($content{$1})) {
                # The joined content has no trailing newline; add one so
                # the following template line starts on a fresh line.
                print README_OUT "$content{$1}\n";
        } else {
                print README_OUT "$_\n";
        }
}

print README_OUT "\n\n";
close(README_OUT);