#
# Makefile for Safari perllib project
# Copyright (c) 1999 by Barrie Slaymaker, rbs@telerama.com
#
# You may distribute under the terms of either the GNU General Public
# License or the Artistic License, as specified in the README file.
#
# A few notes: saf_http_out $(EXPIRES) and a few others auto-create directories
# as needed.
#

# Disable all built-in suffix rules; everything below is an explicit or
# pattern rule.
.SUFFIXES:
#.SECONDARY:

##
## How soon caches should expire pages from the head revision, in seconds
##
#HEAD_EXPIRES=-e 300
#
##
## How soon other pages should expire. HTTP spec recommends 1 year.
## That's a lot of seconds. Our http tree should be updated periodically
## during that year in order to tweak the Expires tags so that we don't
## end up with pages that become uncachable in a year. Sounds like a
## job for a perl script that tweaks the files, then restores their mtimes
## so make won't get fooled into thinking they're newer than they are.
##
#LONG_EXPIRES=-e 31536000
#
##
## When to expire based on whether or not the head revision's being asked
## for.
##
# NOTE(review): "SAV_REV" below looks like a typo for "SAF_REV" — confirm
# before re-enabling this conditional.
#ifeq ($(SAV_REV),_head)
#EXPIRES=$(HEAD_EXPIRES)
#else
#EXPIRES=$(LONG_EXPIRES)
#endif

# Expiry headers are currently disabled; see the commented-out logic above.
EXPIRES=

#
# Some redirects to make it easy to end up in the right place in the
# tree
#

# Site root: redirect to the head revision's Default view of the depot.
http_dir/index.html:
	echo "Forwarding..." |\
	saf_http_out $(EXPIRES) -h ":loc: _head/Default/depot/" -o "$@"

# Revision root: redirect to the Default filter for that revision.
http_dir/$(SAF_REV)/index.html:
	echo "Forwarding..." |\
	saf_http_out $(EXPIRES) -h ":loc: Default/depot/" -o "$@"

#
# The perl p4d has only the original, default depot. Even though p4_ls
# is cool with that, it's nice to the users to redirect them to
# the root of that depot.
#
http_dir/$(SAF_REV)/$(SAF_FILTER)/index.html:
	echo "Forwarding..." |\
	saf_http_out $(EXPIRES) -h ":loc: depot/" -o "$@"

#
# Directory listings
#
# These are the same no matter what the filter spec is.
#
#http_dir/$(SAF_REV)/$(SAF_FILTER)/index.html: co_dir/$(SAF_REV)/index.html
#	saf_http_out $(EXPIRES) "$<" -o "$@"

# Wrap a cached directory listing in HTTP headers for delivery.
http_dir/$(SAF_REV)/$(SAF_FILTER)/%/index.html: co_dir/$(SAF_REV)/%/index.html
	saf_http_out $(EXPIRES) "$<" -o "$@"

##
## Filters
##

#
# Normal files
#
# Keep the generated filter pages around even when they were only built
# as intermediates.  (Trailing backslash removed from the last entry so
# the next rule header can't be spliced onto this line.)
.PRECIOUS: \
	http/$(SAF_REV)/plain/%\
	http/$(SAF_REV)/pretty/%\
	http/$(SAF_REV)/ChLines/%\
	http/$(SAF_REV)/None/%\
	http/$(SAF_REV)/NoMenus/%\
	http/$(SAF_REV)/Default/%

# Annotated "change lines" view straight from the depot.
http/$(SAF_REV)/ChLines/%: co/$(SAF_REV)/%
	saf_p4pr "//$*$(SAF_P4_REV)" |\
	saf_http_out -s "$<" $(EXPIRES) --file-type=text -o "$@"

# Plain-text view (wrapped in <PRE> by saf_http_out).
http/$(SAF_REV)/plain/%: co/$(SAF_REV)/%
	saf_http_out -s "$<" $(EXPIRES) --PRE "$<" -o "$@"

# Syntax-highlighted view via code2html; fall back to an unfiltered copy
# if the highlighting pipeline fails.
# Fix: the brace group must be terminated with ';' before '}' — otherwise
# the shell takes '}' as an argument to saf_http_out and the group is
# never closed (syntax error at end of input).
http/$(SAF_REV)/pretty/%: co/$(SAF_REV)/%
	{ \
	export LANGCODE=`pfile --follow --fields=type "$<"` ; \
	echo "$$LANGCODE" ; \
	unset GATEWAY_INTERFACE ; code2html -t 8 --linknumbers $$LANGCODE "$<" | \
	saf_http_out -s "$<" $(EXPIRES) -o "$@" ; \
	} || \
	saf_http_out -s "$<" $(EXPIRES) "$<" -o "$@"

# View without the edit/filter menus.
http/$(SAF_REV)/NoMenus/%: co/$(SAF_REV)/%
	saf_http_out -s "$<" $(EXPIRES) --noedit "$<" -o "$@"

# Completely unfiltered view.
http/$(SAF_REV)/None/%: co/$(SAF_REV)/%
	saf_http_out -s "$<" $(EXPIRES) "$<" -o "$@"

#http/$(SAF_REV)/POD/%.perl: POD/$(SAF_REV)/%.html
#	perl -pe 's@(HREF="[^/][^:]*)\.html"@$$1.pm"@ig' "$<" |\
#	saf_http_out $(EXPIRES) -o "$@"
#
#http/$(SAF_REV)/POD/%.pod: POD/$(SAF_REV)/%.html
#	perl -pe 's@(HREF="[^/][^:]*)\.html"@$$1.pm"@ig' "$<" |\
#	saf_http_out $(EXPIRES) -o "$@"
#
#http/$(SAF_REV)/POD/%.pm: POD/$(SAF_REV)/%.html
#	perl -pe 's@(HREF="[^/][^:]*)\.html"@$$1.pm"@ig' "$<" |\
#	saf_http_out $(EXPIRES) -o "$@"
#
#http/$(SAF_REV)/POD/%.pl: POD/$(SAF_REV)/%.html
#	perl -pe 's@(HREF="[^/][^:]*)\.html"@$$1.pm"@ig' "$<" |\
#	saf_http_out $(EXPIRES) -o "$@"

#
# Links in podtohtml output pages point to .html files, so redirect these
# to point to a .pm file... I leave it here as an example of a redirect.
# Note that we need to put some text in the body to keep cgimake from
# complaining about unmade targets...
#
#
# pod2html and podtohtml are both buggy. pod2html also only produces
# absolute links. I'm using pod2html and making sure the links are
# hardcoded (which prevents you from relocating the resulting docset)
# because it fails but generates output. It's not a batch converter
# while podtohtml is.
#
# Render a checked-out file's POD as HTML and wrap it for HTTP delivery.
# NOTE(review): --htmlroot is passed twice; the second occurrence
# presumably wins inside pod2html — confirm which one is intended.
http/$(SAF_REV)/POD/%: co/$(SAF_REV).saf-all-files
	mkpath "$@"
	pod2html "--htmlroot=POD/$(SAF_REV)" "--infile=co/$(SAF_REV)/$*" \
		"--htmlroot=/safaridev/perl/$(SAF_REV)/POD/" | \
	saf_http_out -s "co/$(SAF_REV)/$*" $(EXPIRES) -o "$@"

#POD/$(SAF_REV)/%: co/$(SAF_REV).saf-all-files
#	podtohtml -d "POD/$(SAF_REV)" -i "POD/$(SAF_REV)/index.html"\
#	co/$(SAF_REV)

#
#
# Preferred view
#
# If make was invoked by cgimake or the command line, MAKELEVEL will be 0.
# In this case, we make sure the base file is checked out and do a recursive
# make.
#
# In the recursive make MAKELEVEL is set to 1 by make. In this case, we
# analyze the base file and figure out what view is preferred, then
# build that view and link to it.
#
ifeq ($(MAKELEVEL),0)
# Top-level invocation: ensure the base file is checked out, then recurse
# so the preferred view can be computed from the checked-out file.
http/$(SAF_REV)/Default/%: co/$(SAF_REV)/%
	$(MAKE) -f $(SAF_CONF_DIR)/Makefile \
		--no-print-directory http/$(SAF_REV)/Default/$*
else
# Recursive invocation: ask preferred_view which filter this file should
# get, build that view, and symlink Default to it.
# NOTE(review): recursive '=' re-runs the shell command on every expansion;
# ':=' would run it once — left as-is since later-set vars may be involved.
SAF_FILTER=${shell preferred_view co/$(SAF_REV)/$(SAF_FILE) }
http/$(SAF_REV)/Default/%: http/$(SAF_REV)/$(SAF_FILTER)/%
	mkpath "$@"
	-rm -f "$@"
	ln -s "${shell pwd}/$<" "$@"
endif

#
# Server-wide (ie not per-file) Reports
#
# Fix: the old declaration named "http/_head/labels.html", but the actual
# rule target is "http/_head/labels/index.html" — the real target was
# never marked phony.
.PHONY: http/_head/changes/index.html http/_head/labels/index.html

# Full change list, regenerated from the cached p4_changes output.
http/_head/changes/index.html: p4_changes
	cat p4_changes | \
	p4_changes_2_html | \
	saf_http_out $(EXPIRES) -o "$@"

# A single change description; the stem is the change number.
http/_head/changes/%.html:
	p4 describe -du "$*" | \
	p4d2p | \
	p4_desc_2_html | \
	saf_http_out $(EXPIRES) -o "$@"

# Label list, regenerated from the cached p4_labels output.
http/_head/labels/index.html: p4_labels
	cat p4_labels | \
	p4_labels_2_html | \
	saf_http_out $(EXPIRES) -o "$@"

# Per-file revision history.
http/_head/filelog/%:
	p4_filelog "//$*" | \
	saf_http_out $(EXPIRES) -o "$@"

#
# Handle tool views
#
#http/$(SAF_REV)/perl_-c/%: co/$(SAF_REV)/%
#	perl -c "$<" 2>&1 | \
#	saf_http_out $(EXPIRES) -o "$@"
#
#http/$(SAF_REV)/perl_-cw/%: co/$(SAF_REV)/%
#	perl -cw "$<" 2>&1 | \
#	saf_http_out $(EXPIRES) -o "$@"
#

# Force a download (save-as) instead of an inline view.
http/$(SAF_REV)/Download/%: co/$(SAF_REV)/%
	saf_http_out -s "$<" $(EXPIRES) --mime-type=application/octet-stream\
		"$<" -o "$@"

#
# gcc doesn't seem to return an exit code if -Wall finds anything, so
# we go to a lot of effort to interpret the results
#
# NOTE(review): the fixed temp name /tmp/gcclint.html is a race under
# parallel or concurrent builds — a name derived from $@ would be safer.
http/$(SAF_REV)/gcclint/%: co/$(SAF_REV)/%
	cd `dirname "$<"` ; \
	gcclint `basename "$<"` > /tmp/gcclint.html
	saf_http_out -s "$<" /tmp/gcclint.html $(EXPIRES) -o "$@"

# NEEDS WORK!!
# Build and serve a gzipped tarball of a whole revision.
http/$(SAF_REV)/tar/%.tar.gz: co/%/p4_files
	get_all "$<" "co/$*"
	cd co ; tar czf "$(SAF_PROJECT)-$*.tar.gz" "$*"
	cat "co/$(SAF_PROJECT)-$*.tar.gz" |\
	saf_http_out $(EXPIRES) -h ":c-t: application/gzip" -o "$@"

# Word/line/byte counts for a single file.
# NOTE(review): after "cd co" the -s path "$<" is relative to co/ — confirm
# saf_http_out only records the name rather than reading the file.
http/$(SAF_REV)/wc/%: co/$(SAF_REV)/%
	cd co ; \
	wc "$(SAF_REV)/$*" |\
	perl -ane 'printf( "%d lines, %d words, %d bytes in %s\n", @F )' | \
	saf_http_out -s "$<" $(EXPIRES) -o "../$@"

##
## source cache: we cache files from the depot locally in order to
## keep from hitting the depot for each filter or view change
##

#
# The p4_.... scripts clean up files specified with -o if there's an
# error. So we can use .PRECIOUS to ask make to keep them around instead
# of deleting them. Eventually .SECONDARY should be powerful enough to
# use here.
#
.PRECIOUS: co_dir/$(SAF_REV)/index.html\
	co_dir/$(SAF_REV)/%/index.html\
	co/$(SAF_REV)/%

# Depot-level directory listing.
co_dir/$(SAF_REV)/index.html: p4_depots
	p4_ls --depot-list=p4_depots "//*" -o "$@"

# Per-directory listing, driven by the cached file list.
co_dir/$(SAF_REV)/%/index.html: co_dir/$(SAF_REV)/p4_files
	p4_ls "--file-list=$<" "//$*/*" -o "$@"

# Fetch one file from the depot into the local source cache.
co/$(SAF_REV)/%: co_dir/$(SAF_REV)/p4_files
	mkpath "co/"
	cd co ; p4_get "-l=../$<" "-r=$(SAF_REV)" -o - "$*"

# This rule updates the head revision if a new change number exists
ifeq ($(SAF_TARGET),/_head/update)
SAF_REV=@$(shell p4 changes -m 1 | sed 's/Change \([0-9]*\).*/\1/' )
# Fix: the loop variable must be written $$FILE — with a single $, make
# expanded $F (empty) and the shell saw the literal string "ILE".
co/_head/update: co/$(SAF_REV).saf-all-files
	mkpath "$@"
	echo "Update results:" > /tmp/perl_update
	for FILE in p4_* ; do rm -f $$FILE >> /tmp/perl_update 2>&1 ; done
	ln -n -f -s "$(SAF_REV)" "co/_head" >> /tmp/perl_update 2>&1
	-mv /tmp/perl_update "$@"

http/_head/update: co/_head/update
	saf_http_out "$<" --file-type=text -o "$@"
endif

#
# A special rule that is used to update all files in a revision
#
# Fix: the brace group needs ';' before '}' — otherwise the shell passes
# '}' as an argument to p4_get and the group is never closed (syntax
# error).  "cd co" persists past the group (braces are not a subshell),
# so the stamp is touched via "../$@".
co/$(SAF_REV).saf-all-files: co_dir/$(SAF_REV)/p4_files
	mkpath "$@"
	{ cd co ; p4_get "-l=../$<" "-r=$(SAF_REV)" /... ; } && touch "../$@"

#
# Utility functions...
#
.PHONY: http/clean

# Wipe all generated trees and serve the deletion log as the response.
http/clean:
	# Assume no two people can execute this target at once, which
	# is guaranteed by cgimake
	rm -rfv http http_dir co_dir > /tmp/perlclean 2>&1
	saf_http_out $(EXPIRES) /tmp/perlclean -o "$@"

# NOTE(review): GNU make does not honor patterns in .PHONY — this
# declaration is likely a no-op; confirm the intended effect.
.PHONY: %/envcheck

# Emit an (empty-bodied) response so the environment can be sanity-checked.
%/envcheck:
	saf_http_out $(EXPIRES) -o "$@"

#
# Rebuild the backend data files and the head revision data files
#
.PHONY: http/rebuild

# Drop the cached p4 output and the head-revision cache, then rebuild
# them via a recursive make; the accumulated output becomes the response.
http/rebuild:
	rm -f p4_* > "$@"
	rm -rf co_dir/_head >> "$@"
	$(MAKE) "-f$(SAF_CONF_DIR)/Makefile" p4_depots co_dir/_head/p4_files >> "$@"

#
# Cached output of some p4 commands. Note that we need to update
# the head revision whenever a change is submitted. A cron job can
# do that by "make http/rebuild" using cgimake, we hope.
#
.PRECIOUS: co_dir/%/p4_files

# Cache the full file list for a revision; remove the cache on failure
# so a truncated file never looks up to date.
co_dir/%/p4_files:
	mkpath "$@"
	p4 files "//...$(SAF_P4_REV)" > "$@" || rm "$@"

#
# Don't cache these: they change periodically
#
p4_depots:
	p4 depots > "$@" || rm "$@"

p4_changes:
	p4 changes -l -m100 > "$@" || rm "$@"

p4_labels:
	p4 labels > "$@" || rm "$@"
# | Change | User | Description | Committed | |
---|---|---|---|---|---|
#7 | 195 | Barrie Slaymaker | Added filters NoMenus, None (replaces HTML), tool Download. | ||
#6 | 194 | Barrie Slaymaker |
Tweaked rules to allow a cron job or web request to /project/_head/update to update a project to the head revision. |
||
#5 | 187 | Barrie Slaymaker |
Added lots of options to saf_http_out to let the source file be used to figure out file type for determining filter and tool menus. Changed Makefiles to pass the original source file name in to saf_http_out. Added --PRE to saf_http_out. This is good for displaying plain text versions of HTML files. Improved the tool and filter logic in Safari::Edit::p4.pm to work with original file type to determine what makes sense for what kind of file. This really needs to be generalized in to a config file instead of being buried in a module. Increased usage of HTML::Entities::encode_entities instead of s///g . Moved Content-type: header to after the edit() routine call and now we pass and recover mime-type as an option. Added SAF_TARGET to environment under cgimake, Fixed updirectory counting, which broke when cgimake started parsing the project name from PATH_INFO. Removed all edit routines from saf_http_out.conf files. NOTE: we can now fall back to a single cgimake.conf file. Any day now. |
||
#4 | 178 | Barrie Slaymaker |
Modified cgimake to get the project name out of the target name if one isn't supplied. This makes it so that Apache's mod_rewrite is no longer needed to extract the project name from the URL and place it in the QUERY_STRING, and so that you can call cgimake from the command line and place the project name in the target path: cgimake /perl/_head/Default/depot/ Modified cgimake to work easily from the command line Fixed some minor bugs in assembling paths that were causing // to appear in paths when no project is specified. Fixed minor bug that cause cgimake to try to read a bogus config file when there is no $project Tweaked p4_get to provide a more reasonable level of verbosity. Updated the apache doc to reflect the simpler, non-rewrite technique. Added targets to fetch a new _head revision if the head change number has changed. Need to check in p4_update. |
||
#3 | 168 | Barrie Slaymaker | Added YAPC paper, slides | ||
#2 | 165 | Barrie Slaymaker | Applied Greg KH's license patch. | ||
#1 | 162 | Barrie Slaymaker | First code & documentation checkin |