#!../src/tops -i -s ../sys -u drivers
/*
Program Tops - a stack-based computing environment
Copyright (C) 1999-2011  Dale R. Williamson

Author: Dale R. Williamson <dale.williamson@prodigy.net>

File test/cluster  March 2004

Note that usrpath is directory test/drivers.  Script drivers/tserv,
which is run to start every node, has a generic #! line just for this demo.

Tue Mar 15 21:36:05 PDT 2011.  Word cluster_start modified to start the
nodes as SSL servers if flag SSL_CONNECT is set.  SSL certificates are
at usrpath, directory test/drivers.

June 2005.  This file was created using infix text to exercise the new
infix parser; the postfix version of this file is usr/cluster_postfix.

------------------------------------------------------------------------

   Running the Make-pi demo of file clu.v.

   In directory tops/test, this file runs from the Unix prompt using:

      % cluster

   or from the program's ready prompt (also running in tops/test) with:

      ready > 'cluster' psource

   When run from the interactive ready prompt, the node windows remain 
   connected and in view (if X11), and are handy for further study of 
   network commands (word clients shows their connections).

   If the node windows remain connected and visible, by running as
   noted above, then their keyboards are enabled by the following 
   phrase from the interactive window:

      "syspath 'key.v' pathcat source" cluster_run

   This phrase is automatically run at the end of this demo when flag 
   keys? is true, that is, when the demo is being run interactively.
 
----------------------------------------------------------------------*/

// Abort the whole test when the networking word set was not compiled in.
   if(missing("CONNECT"))
      HALT(nl(
      dot(" cluster: words for networking not present, test skipped")));

// Load the Make-pi cluster words (file clu.v) unless already sourced.
   if(missing("USE_PORT")) source("clu.v");

   this_file="cluster";
   msource(this_file,"Words new."); // sourcing new words below 

//----------------------------------------------------------------------

// Running the Make-pi demo.

   nl();
// Start the three node servers and connect the head node to them;
// the (3,1) arguments are unused in this demo (see cluster_start below).
   cluster_start(3,1);
   clients();
   remoteack.SEC=5; // give nodes 5 seconds to acknowledge the head node

   if(cluster_ack()) // acknowledgment from all the nodes
      nl(Ready(nl(dot(" All nodes are connected"))));
   else (
      clients(),
      dot(" Connecting to all nodes failed;"),
      dot(" the following sockets are connected:"), nl(),

      // NOTE(review): the trailing ' after yank(...) looks like a stray
      // quote character — confirm the infix parser accepts/requires it.
      dot(mtext(yank("cluster_ack","ACK")')), nl(),

      dot(" cluster: in file cluster, try increasing SEC"),
      dot(" in word cluster_start"), nl(),

      halt(nl(dot(" cluster: halting")))
   );

// Show the node/socket/port table, then turn off word tracing.
   nl(dot(" Node names, sockets, and ports:"));
   dot(indent(cluster_props(),2));
   pause;
   nontrace();

   nl(dot(nl(" Preparing to run the Make-pi demo")));
   msource("clu.v","Make-pi demo."); // pull in section "Make-pi demo."

// If interactive, run whos on the nodes and on the head node:
   if(keys?) whos(Ready(cluster_run("whos")));
   pause;
   
   if(!cluster_ack()) // is everybody ready?
      HALT(nl(dot(" cluster: nodes are not ready; demo halting")));

   cluster_run("wtrace"); // activate trace on all nodes

   INIT((Pieces=32)); // make pi from this many pieces

   nl(dot(" Integrating f(x)=4/(1 + x^2) over these " + 
      intstr(Pieces) + " intervals from 0 to 1:"));
   .m(make_piece.RANGES); 

   dot(nl," Cluster is running the Make-pi demo");
   dot(nl,
      " Processing is purposely slowed to make every X11 window "
      + "active");
   if(!X11) dot(nl,
      " Since this machine is not running X11, please standby ...");
   nl;

   cluster_source("make_piece"); // running while head node waits

/* Word awhile (file clu.v) flags when pi is done.  Meanwhile, for this
   demo, the script needs to go into a wait state to keep from running 
   to completion.  Allow 30 seconds: */
   WAIT_INIT(30,"awhile"); // awhile will signal when wait is over
   WAIT_BEGIN(); // head node begins indeterminate wait state

// When here, pi is done. 

// Report the computed value next to the program's built-in pi.
   dot(spaces(nl(2)));
   nl(dot(format(PI,"%16.14f") + " computed using "
      + intstr(Pieces) + " pieces for pi"));

   dot(spaces(2));
   nl(dot(format(pi,"%16.14f") + " is the program's pi"));

// Interactive runs keep the nodes alive (trace off); batch runs close them.
   if(keys?)
      Ready(cluster_run("nowtrace")); // turn trace off
   else
      cluster_close(nl(dot(" Cluster closing")));

   nl(dot(nl(" End of cluster demo")));

// Postfix epilogue: when interactive, enable the node keyboards;
// otherwise warn that the following socket-close messages are expected.
<<
   keys? 
   IF \ Keyboards for all: 
      "syspath 'key.v' pathcat source" cluster_run
   ELSE
      " Cluster demo: the following socket messages are ok" . nl
   THEN
>>

   halt

------------------------------------------------------------------------

   Words new.
<<
   inline: cluster_start (N p --- ) \ start N nodes running processor p
{     Tue Mar 15 15:30:03 PDT 2011.  Modify for encrypted (SSL) servers.
      Note that client SSL certification must be at usrpath, which is 
      defined to be drivers on the #! line above.

      For this demo, this word replaces the version of cluster_start
      for a real cluster in file clu.v, just sourced above.

      To act as a cluster of three nodes, three background servers are 
      started by running script usr/tserv.  Because this is a simula-
      tion on a single machine, each server is listening on a different
      port; node servers on a cluster can all listen on the same port 
      number since they are on separate machines. 

      In addition to starting the cluster nodes listening, this word 
      also connects the nodes to the head node by running word
      cluster_connect.

      The head node connects to the nodes using the machine's loopback 
      IP address.

      If some nodes fail to connect, try increasing SEC, the time for 
      the program to idle before trying to connect to the nodes.

      Increasing SEC gives more time for the node programs to start up 
      and get into listening mode as servers.
} 
    \ Startup idle time: SEC defaults to 5 seconds, 10 on host "sigma".
      [ 5 "SEC" book 
        host "sigma" = IF 10 "SEC" book THEN \ this one needs more time
      ]

    \ If cluster_connect has already booked HOST, the cluster is up:
    \ drop our arguments, report, and return without restarting anything.
      "cluster_connect" "HOST" yank any? 
      IF (N p hHOST) 3 dump
         " cluster_start: cluster is running" dot nl return 
      THEN 

      (N p) 2drop \ N and p are not used in this demo

    \ Book three consecutive free port numbers P1..P3 from def_port up.
      def_port nextport "P1" book
      P1 1+ nextport "P2" book
      P2 1+ nextport "P3" book

    \ Build one background server command line per node.  The " SSL "
    \ placeholder is then edited by strp to "-ssl" when flag SSL_CONNECT
    \ is set, or to nothing otherwise (strp presumably substitutes the
    \ "SSL" token in the string -- TODO confirm against strp's glossary).
      usrpath "tserv -port " catpath P3 intstr + " SSL &" + (q2)
      "SSL" SSL_CONNECT IF "-ssl" ELSE "" THEN strp (q2) 

      usrpath "tserv -port " catpath P2 intstr + " SSL &" + (q1)
      "SSL" SSL_CONNECT IF "-ssl" ELSE "" THEN strp (q1) 

      usrpath "tserv -port " catpath P1 intstr + " SSL &" + (q0)
      "SSL" SSL_CONNECT IF "-ssl" ELSE "" THEN strp (q0) 

    \ Launch the three servers: under X11 each gets its own small
    \ stacked window (nodewin consumes one command string plus the
    \ banked GEO geometry); otherwise each runs via plain shell.
      (q2 q1 q0)
      X11 
      IF "-geometry 50x8+0+0"   "nodewin" "GEO" bank nodewin
         "-geometry 50x8+0+150" "nodewin" "GEO" bank nodewin
         "-geometry 50x8+0+300" "nodewin" "GEO" bank nodewin
      ELSE \ running without windows
         shell 
         shell 
         shell
      THEN 

    \ Running locally on loopback IP address:
      IPloop spaced P1 intstr +
      IPloop spaced P2 intstr +
      IPloop spaced P3 intstr +
    \ NOTE(review): pilen appears to gather the 3 address strings above
    \ into the JOBS list for cluster_connect -- confirm in the glossary.
      3 pilen "JOBS" book

    \ Idle SEC seconds so the background servers reach listening state.
      SEC " Idle " that intstr " seconds while cluster nodes start..." 
      + + dot idle
      " done" dot nl

      " Connecting head node to cluster nodes..." dot nl
      cluster_connect 
      " done" dot nl

   end

   inline: Ready ( --- ) \ nodes at attention
    \ Broadcast one phrase for every node to run: under X11 each node
    \ prints a fresh "Ready " prompt in its window; without X11 node
    \ output is silenced by redirecting sysout to /dev/null.
      X11 
      IF "nl 'Ready ' dot" (qS)          \ show Ready message on node
      ELSE "'/dev/null' set_sysout" (qS) \ show no node text
      THEN (qS) cluster_run              \ broadcast S to all to run
   end
>>
