/* rnn.cc — Random Neural Network implementation (last change 1998-07-28) */


#include "rnn.hh"

/* Internal Function */
/* Internal Function
   Free a matrix allocated by fmat(): one contiguous data block plus the
   row-pointer array.  row/col are unused but kept for interface
   compatibility with existing callers. */
void RNN::free_fmat(float **data,int row, int col)
{
  delete [] data[0];   /* was `delete`: array form required for new float[row*col] */
  delete [] data;      /* was `delete`: array form required for new float*[row] */
}

/* Internal Function */
/* Internal Function
   Free a matrix allocated by cmat(); mirror of free_fmat().
   row/col are unused but kept for interface compatibility. */
void RNN::free_cmat(u_char **data,int row, int col)
{
  delete [] data[0];   /* was `delete`: array form required for new u_char[row*col] */
  delete [] data;      /* was `delete`: array form required for new u_char*[row] */
}

/* Internal Function */
/* Internal Function
   Allocate a row x col float matrix as one contiguous, zeroed block with
   a separate array of row pointers.  Free with free_fmat(). */
float ** RNN::fmat(int row, int col)
{
  float **T;
  int i;                      /* unused `j` removed */
  T = new float *[row];
  T[0] = new float [row*col]; /* single block keeps the data contiguous */
  memset(T[0],0,row*col*sizeof(float));
  for (i=1;i<row;i++)         /* row 0 already points at the block start */
    T[i] = &(T[0][i*col]);
  return(T);
}

/* Internal Function */
/* Internal Function
   Allocate a row x col u_char matrix as one contiguous, zeroed block;
   mirror of fmat().  Free with free_cmat(). */
u_char ** RNN::cmat(int row, int col)
{
  u_char **tmpN;
  int i;                          /* unused `j` removed */
  tmpN = new u_char *[row];
  tmpN[0] = new u_char [row*col];
  memset(tmpN[0],0,row*col*sizeof(u_char));
  for (i=1;i<row;i++)             /* start at 1: row 0 already set (matches fmat) */
    tmpN[i] = &(tmpN[0][i*col]);
  return(tmpN);
}

/*
Internal Function

Function: new_rnn
Purpose: initializes a neural network
Arguments:
  ip: number of "input" neurons (neurons which accept inputs)
  hn: number of "hidden" neurons (neurons accepting no i/o)
  op: number of "output" neurons (neurons which produce o/p)
  ETA: learning rate
  RATE: rate of o/p neuron firing
  rnntype: select topology of rnn
    0: feed-forward (optimized for speed)
    1: fully topologically recurrent 
    2: undefined topology which must be defined using connect
*/

void RNN::new_rnn(int ip, int hn, int op, float ETA, float RATE, u_char rnntype)
{
  /* Allocate and initialize all members for an ip/hn/op network; see the
     block comment above for argument meanings.  Does NOT free previous
     storage — callers (copy, ~RNN paths) must do that first. */
  int i,j;
  N = ip+hn+op;          /* total neuron count */
  IP = ip;
  HN = hn;
  OP = op;
  eta = ETA;
  rate = RATE;
  RNNTYPE = rnntype;
  weights_changed = 1;   /* force r[] firing rates to be recomputed */
  wp = fmat(N,N);        /* excitatory weights */
  wm = fmat(N,N);        /* inhibitory weights */
  w = fmat(N,N);         /* workspace matrix used by ff_inv/gen_inv */
  for (i=0;i<N;i++)
    w[i][i] = 1.0;					    
  C = cmat(N,(int)((N+7)/8.0));   /* connection bitmap: 1 bit per (i,j) pair */
  GOP = new float[N];    /* desired ("good") outputs */
  num = new float[N];    /* NOTE(review): num is never freed in ~RNN — leak; confirm */
  memset(GOP,0,N*sizeof(float));
  typ = new u_char[N];   /* layer tag per neuron: 1=input, 2=hidden, 3=output */
  for (i=0;i<N;i++)
    {
      if (i < IP) typ[i] = 1;
      else if (i < IP + HN) typ[i] = 2;
      else typ[i] = 3;
    }
  q = new float[N];      /* neuron outputs (firing probabilities) */
  memset(q,0,N*sizeof(float));
  den = new float[N];    /* per-neuron denominators of the rate equation */
  memset(den,0,N*sizeof(float));
  r = new float[N];      /* per-neuron firing rates */
  memset(r,0,N*sizeof(float));
  lp = new float[N];     /* external excitation inputs (lambda plus) */
  memset(lp,0,N*sizeof(float));
  lm = new float[N];     /* external inhibition inputs (lambda minus) */
  memset(lm,0,N*sizeof(float));
  if (RNNTYPE == 0) 
    {
      /* feed-forward: connect each layer to the next (typ goes 1->2->3) */
      for (i=0;i<N;i++)
	for (j=0;j<N;j++)
	  if (typ[i] + 1 == typ[j]) connect(i,j);
    }
  else if (RNNTYPE == 1)
    {
      /* fully recurrent: connect every neuron to every other neuron */
      for (i=0;i<N;i++)
	for (j=0;j<N;j++)
	  if (i != j) connect(i,j);
    }
  else {
    /* RNNTYPE 2: topology left empty; caller defines it via connect() */
  }
  randomweights();
}

/* Internal Function */
/* Internal Function
   ff_inv: fill the needed entries of the workspace matrix w[][] for the
   learning rule, exploiting the layered structure: only input->hidden,
   hidden->output, and (via the product below) input->output entries are
   ever read by ff_computews. */
void RNN::ff_inv()
{
  int i,j,k;
  float z;
  /* input -> hidden entries */
  for (i=0;i<IP;i++)
    for (j=IP;j<IP+HN;j++)
      w[i][j] = (wp[i][j] - wm[i][j]*q[j])/den[j];
  /* hidden -> output entries */
  for (i=IP;i<IP+HN;i++)
    for (j=IP+HN;j<N;j++)
      w[i][j] = (wp[i][j] - wm[i][j]*q[j])/den[j];
  /* input -> output entries: chain product through the hidden layer */
  for (i=0;i<IP;i++) 
    for (j=IP+HN;j<N;j++) {
      z = 0;
      for (k=IP;k<IP+HN;k++)
	z+= w[i][k] * w[k][j];
      w[i][j] = z;
    }
}

/* Internal Function */
/* Internal Function
   gen_inv: compute the full workspace matrix w[][] for an arbitrary
   topology.  Looks like an in-place LU-style factorization of
   (I - W') into lower (x) and unit-upper (y) factors, followed by
   elimination sweeps and a triangular product to form the inverse —
   NOTE(review): reconstruction from code; confirm against the RNN
   learning-rule paper before relying on this description. */
void RNN::gen_inv()
{
  int r,c,i,st,j,k,rn;
  float z,z2;
  float **x,**y;
  
  x = fmat(N,N);
  y = fmat(N,N);
  
  /* factorization pass: builds x (lower) and y (upper, unit diagonal) */
  for (c=0;c<N;c++)
    for (r=0;r<N;r++) {
      if (c == r) {
	y[r][c] = 1;
	x[r][c] = 1 - (wp[r][c] - wm[r][c]*q[c])/den[c];
	for (i=0;i<c;i++)
	  x[r][c] -= x[r][i]*y[i][c];
      }
      if (r > c) {
	x[r][c] = - (wp[r][c] - wm[r][c]*q[c])/den[c];
	for (i=0;i<c;i++)
	  x[r][c] -= x[r][i]*y[i][c];
      }
      if (r < c) {
	y[r][c] = - (wp[r][c] - wm[r][c]*q[c])/den[c];
	for (i=0;i<r;i++)
	  y[r][c] -= x[r][i]*y[i][c];
	y[r][c] = y[r][c] / x[r][r];
      }
    }
  
  /* elimination sweeps on both factors; x is processed from column 0
     forward while y is processed from column N-1 backward */
  for (rn=0;rn<N;rn++) {
    for (j=0;j<N;j++)
      {
	z = x[j][rn];
	z2 = y[j][N-1-rn];
	if (j > rn)
	  for (k=0;k<=rn;k++)
	    if (rn==k) x[j][k] = -z*x[rn][rn];
	    else x[j][k] -= z*x[rn][k];
	if (j < N-1-rn)
	  for (k=N-1-rn;k<N;k++)
	    if (N-1-rn == k) y[j][k] = -z2 * y[N-1-rn][N-1-rn];
	    else y[j][k] -= z2*y[N-1-rn][k];
      }
  }
  
  /* triangular product: w = y * x over the overlapping index range */
  for (i=0;i<N;i++)
    for (j=0;j<N;j++) {
      z = 0;
      for (k=(i > j ? i : j);k<N;k++)
	z += x[k][j] * y[i][k];
      w[i][j] = z;
    }
  
  free_fmat(x,N,N);
  free_fmat(y,N,N);
}

/* Internal Function */
void RNN::ff_computews()
{
  int u,v,k,l;
  float *gp, *gm;
  float deltwp, deltwm;
  float tmp_wp, tmp_wm;
  float gpu, gpv, gmu, gmv;

  weights_changed = 1;
  for (u=0;u<IP;u++) {
    for (v=IP;v<IP+HN;v++) {
      gpu = -1.0 / den[u];
      gmu = -1.0 / den[u];
      gpv = 1.0 / den[v];
      gmv = -q[v]/den[v];
      tmp_wp = tmp_wm = 0;
      for (k=N-OP;k<N;k++)
	{
	  deltwp = gpu * q[u] * w[u][k];
	  deltwm = gmu * q[u] * w[u][k];
	  deltwp += gpv * q[u] * w[v][k];
	  deltwm += gmv * q[u] * w[v][k];
	  tmp_wp += (q[k]-GOP[k-(N-OP)])*deltwp;
	  tmp_wm += (q[k]-GOP[k-(N-OP)])*deltwm;
	}
      wp[u][v] -= eta*tmp_wp;
      wm[u][v] -= eta*tmp_wm;
      if (wp[u][v] < 0) wp[u][v] = 0;
      if (wm[u][v] < 0) wm[u][v] = 0;
    }
  }
  for (u=IP;u<IP+HN;u++) {
    for (v=IP+HN;v<N;v++) {
      gpu = -1.0 / den[u];
      gmu = -1.0 / den[u];
      gpv = 1.0 / den[v];
      gmv = -q[v]/den[v];
      tmp_wp = tmp_wm = 0;
      for (k=N-OP;k<N;k++)
	{
	  deltwp = gpu * q[u] * w[u][k];
	  deltwm = gmu * q[u] * w[u][k];
	  deltwp += gpv * q[u] * w[v][k];
	  deltwm += gmv * q[u] * w[v][k];
	  tmp_wp += (q[k]-GOP[k-(N-OP)])*deltwp;
	  tmp_wm += (q[k]-GOP[k-(N-OP)])*deltwm;
	}
      wp[u][v] -= eta*tmp_wp;
      wm[u][v] -= eta*tmp_wm;
      if (wp[u][v] < 0) wp[u][v] = 0;
      if (wm[u][v] < 0) wm[u][v] = 0;
    }
  }
}

/* Internal Function */
void RNN::gen_computews()
{
  int u,v,k,l;
  float *gp, *gm, deltwp, deltwm;
  float tmp_wm, tmp_wp;
  
  weights_changed = 1;
  gp = new float[N];
  gm = new float[N];
  for (u=0;u<N;u++) {
    for (v=0;v<N;v++)
      if (C[u][v/8] & 1 << (v%8))
        {
	  gp[u] = gp[v] = 0;
	  gm[u] = gm[v] = 0;
	  gp[u] += -1/den[u];
	  gm[u] += -1/den[u];
	  gp[v] += 1/den[v];
	  gm[v] += -q[v]/den[v];
	  tmp_wp = tmp_wm = 0;
          for (k=N-OP;k<N;k++)
	    {
	      deltwp = gp[u] * q[u] * w[u][k];
	      deltwm = gm[u] * q[u] * w[u][k];
	      deltwp += gp[v] * q[u] * w[v][k];
	      deltwm += gm[v] * q[u] * w[v][k];
	      tmp_wp += (q[k]-GOP[k-(N-OP)])*deltwp;
	      tmp_wm += (q[k]-GOP[k-(N-OP)])*deltwm;
	    }
	  wp[u][v] -= eta*tmp_wp;
	  wm[u][v] -= eta*tmp_wm;
          if (wp[u][v] < 0) wp[u][v] = 0;
          if (wm[u][v] < 0) wm[u][v] = 0;
        }
  }
  delete gp;
  delete gm;
}

/* Internal Function */
/* Internal Function
   genop: compute neuron outputs q[] for an arbitrary topology by
   fixed-point iteration — repeatedly recompute every neuron from every
   other until no output moves by more than 0.003. */
void RNN::genop()
{
  int i, j;        /* unused `k` removed */
  int d;           /* convergence flag: 1 once all outputs are stable */
  float *qp;       /* next iterate of q[] */
  
  qp = new float[N];
  for (i=0;i<N;i++)
    q[i] = 0;
  do
    {
      d = 1;
      for (i=0;i<N;i++)
        {
	  if (weights_changed) {
	    /* firing rate = total outgoing weight; cached until weights change */
	    r[i] = 0;
	    for (j=0;j<N;j++)
	      r[i] += wp[i][j] + wm[i][j];
	    if (r[i] == 0) r[i] = rate;   /* neurons with no fan-out use `rate` */
	  }
          den[i] = r[i] + lm[i]*r[i];
          num[i] = lp[i] * r[i];
          for (j=0;j<N;j++)
	    {
	      num[i] += q[j] * wp[j][i];
	      den[i] += q[j] * wm[j][i];
	    }
        }
      for (i=0;i<N;i++) {
	qp[i] = num[i]/den[i];
	if (qp[i] > 1) qp[i] = 1;        /* outputs are probabilities */
	if (fabs(qp[i] - q[i]) > 0.003) d = 0;   /* still moving: iterate again */
	q[i] = qp[i];
      }
    } while (d == 0);
  delete [] qp;   /* was `delete`: array form required for new[] */
  weights_changed = 0;
}

/* Internal Function */
/* Internal Function
   ffop: compute neuron outputs q[] in one forward sweep; valid only for
   the feed-forward topology (RNNTYPE == 0).  For each neuron i:
     r[i]   = sum of outgoing weights (cached until weights_changed clears),
     num[i] = lp[i]*r[i] + sum over senders j of q[j]*wp[j][i],
     den[i] = r[i]*(1+lm[i]) + sum over senders j of q[j]*wm[j][i],
     q[i]   = min(num[i]/den[i], 1).
   Neurons are visited in [input, hidden, output] order so each layer's
   inputs are already up to date. */
void RNN::ffop()
{
  int i, j;               /* unused k, d, cnt removed */
  int b1,b2,e1,e2;        /* outgoing range [b1,e1), incoming range [b2,e2) */
  
  for (i=0;i<N;i++)
    {
      b1 = e1 = b2 = e2 = 0;   /* safe defaults (typ[] is always 1, 2 or 3) */
      if (typ[i] == 1) {       /* input: sends to hidden, receives nothing */
        b1 = IP;
        e1 = IP+HN;
      }
      else if (typ[i] == 2) {  /* hidden: sends to output, receives from input */
        b1 = IP+HN;
        e1 = N;
        e2 = IP;
      }
      else {                   /* output: sends nothing, receives from hidden */
        b2 = IP;
        e2 = IP+HN;
      }
      if (weights_changed) {
        r[i] = 0;
        for (j=b1;j<e1;j++)
          r[i] += wp[i][j] + wm[i][j];
        if (r[i] == 0) r[i] = rate;   /* output neurons have no fan-out */
      }
      den[i] = r[i] + lm[i] * r[i];
      num[i] = lp[i] * r[i];
      for (j=b2;j<e2;j++)
        {
          num[i] += q[j] * wp[j][i];
          den[i] += q[j] * wm[j][i];
        }
      q[i] = num[i]/den[i];
      if (q[i] > 1) q[i] = 1;   /* outputs are probabilities */
    }
  weights_changed = 0;
}

/* Internal Function */
/* Internal Function
   genop(quant): same fixed-point iteration as genop(), but hidden and
   output neuron values are quantized to steps of 1/quant each pass
   (e.g. quant == 255 approximates 8-bit outputs). */
void RNN::genop(int quant)
{
  int i, j;        /* unused `k` removed */
  int d;           /* convergence flag: 1 once all outputs are stable */
  float *qp;       /* next iterate of q[] */
  
  qp = new float[N];
  for (i=0;i<N;i++)
    q[i] = 0;
  do
    {
      d = 1;
      for (i=0;i<N;i++)
        {
	  if (weights_changed) {
	    /* firing rate = total outgoing weight; cached until weights change */
	    r[i] = 0;
	    for (j=0;j<N;j++)
	      r[i] += wp[i][j] + wm[i][j];
	    if (r[i] == 0) r[i] = rate;
	  }
          den[i] = r[i] + lm[i]*r[i];
          num[i] = lp[i] * r[i];
          for (j=0;j<N;j++)
	    {
	      num[i] += q[j] * wp[j][i];
	      den[i] += q[j] * wm[j][i];
	    }
        }
      for (i=0;i<N;i++) {
	qp[i] = num[i]/den[i];
	if (qp[i] > 1) qp[i] = 1;
	if (i >= IP) {
	  /* quantize non-input neurons to multiples of 1/quant */
	  qp[i] = (int) (qp[i] * (float)quant) / (float)quant;
	  if (qp[i] > 1) qp[i] = 1;
	}
	if (fabs(qp[i] - q[i]) > 0.003) d = 0;   /* still moving: iterate again */
	q[i] = qp[i];
      }
    } while (d == 0);
  delete [] qp;   /* was `delete`: array form required for new[] */
  weights_changed = 0;
}

/* Internal function */
void RNN::ffop(int quant)
{
  int i, j;
  int b1,b2,e1,e2;
  
  for (i=0;i<N;i++)
    {
      switch(typ[i]) {
      case 1:  {
	b1 = IP;
	e1 = IP + HN;
	b2 = 0;
	e2 = 0;
	break;
      }
      case 2: {
	e1 = N;
	b1 = IP + HN;
	b2 = 0;
	e2 = IP;
	break;
      }
      case 3: {
	b1 = 0;
	e1 = 0;
	b2 = IP;
	e2 = b2 + HN;
	break;
      }
      }
      if (weights_changed) {
	r[i] = 0;
	for (j=b1;j<e1;j++)
	  r[i] += wp[i][j] + wm[i][j];
	if (r[i] == 0) r[i] = rate;
      }
      den[i] = r[i] + lm[i] * r[i];
      num[i] = lp[i] * r[i];
      for (j=b2;j<e2;j++)
        {
          num[i] += q[j] * wp[j][i];
          den[i] += q[j] * wm[j][i];
        }
      q[i] = num[i]/den[i];
      if (q[i] > 1) q[i] = 1;
      if (i >= IP) q[i] = ( (int) (q[i] * quant) / (float)quant);
    }
  weights_changed = 0;
}

/* create empty rnn */
/* create empty rnn: zero every size and NULL every owned pointer so the
   destructor and copy() can run safely before init() is called */
RNN::RNN() 
{
  N = 0;
  IP = 0;
  HN = 0;
  OP = 0;
  rate = 0;
  eta = 0;
  RNNTYPE = 0;           /* was uninitialized */
  weights_changed = 0;   /* was uninitialized */
  wp = (float **)NULL;
  wm = (float **)NULL;
  w = (float **)NULL;
  C = (u_char **)NULL;
  den = (float *)NULL;
  num = (float *)NULL;   /* was uninitialized: a freeing destructor would crash */
  GOP = (float *)NULL;
  typ = (u_char *)NULL;
  q = (float *)NULL;
  r = (float *)NULL;
  lp = (float *)NULL;
  lm = (float *)NULL;
}

/* Release all storage allocated by new_rnn().  deleting NULL is a no-op,
   so this is safe on a default-constructed (empty) network. */
RNN::~RNN()
{
  if (wp != (float **) NULL) free_fmat(wp,N,N);
  if (wm != (float **) NULL) free_fmat(wm,N,N);
  if (w != (float **) NULL) free_fmat(w,N,N);
  if (C != (u_char **) NULL) free_cmat(C,N,(int)((N+7)/8.0));
  delete [] GOP;   /* was `delete`: new[] allocations require delete [] */
  delete [] typ;
  delete [] q;
  delete [] den;
  delete [] r;
  delete [] lp;
  delete [] lm;
  /* NOTE(review): `num` (allocated in new_rnn) is not freed here; it cannot
     be deleted safely while the default constructor may leave it
     uninitialized — NULL it there first, then free it here.  TODO */
}

/* 
Function: RNN
Purpose:  create defined rnn 
Arguments:
  ip: number of input type neurons
  hn: number of hidden type neurons
  op: number of output type neurons
  ETA: learning rate for the RNN
  RATE: defined "rate" for o/p neurons (usually 1)
  rnntype: architecture type of RNN
    0: feed-forward
    1: fully recurrant
    2: free form - define architecture using function connect
*/
RNN::RNN(int ip, int hn, int op, float ETA, float RATE, u_char rnntype)
{
  /* all allocation and initialization is delegated to new_rnn() */
  new_rnn(ip,hn,op,ETA,RATE,rnntype);
}


/* 
Function: copy
Purpose: copy an RNN passed to the function 
Arguments: 
  R: an RNN to copy
*/
/* 
Function: copy
Purpose: copy an RNN passed to the function 
Arguments: 
  R: an RNN to copy
NOTE(review): R is passed by value and RNN declares no copy constructor,
  so R shares its arrays with the caller's object and R's destructor
  frees them when copy() returns (double free).  Fixing this requires
  changing the parameter to a const reference in rnn.hh — TODO confirm.
*/
void RNN::copy(RNN R)
{
  int i,j;
  /* drop this network's current storage before rebuilding it */
  if (wp != (float **) NULL) free_fmat(wp,N,N);
  if (wm != (float **) NULL) free_fmat(wm,N,N);
  if (w != (float **) NULL) free_fmat(w,N,N);
  if (C != (u_char **) NULL) free_cmat(C,N,(int)((N+7)/8.0));
  delete [] GOP;   /* was `delete`: new[] allocations require delete [] */
  delete [] typ;
  delete [] q;
  delete [] den;
  delete [] r;
  delete [] lp;
  delete [] lm;
  /* rebuild with R's architecture, then copy weights and topology */
  new_rnn(R.IP,R.HN,R.OP,R.eta,R.rate,R.RNNTYPE);
  for (i=0;i<N;i++)
    for (j=0;j<N;j++) {
      wp[i][j] = R.wp[i][j];
      wm[i][j] = R.wm[i][j];
    }
  for (i=0;i<N;i++)
    for (j=0;j<(int)((N+7)/8.0);j++) 
      C[i][j] = R.C[i][j];
}

/* 
Function: comp
Purpose: compute sum-squared difference between current RNN and RNN R 
Input:
  R: an RNN to compare to current RNN
*/
/* 
Function: comp
Purpose: compute sum-squared difference between current RNN and RNN R,
    with each net's effective weights (wp - wm) normalized by its own
    largest magnitude so nets trained at different scales compare fairly.
Input:
  R: an RNN to compare to current RNN
Returns: normalized sum-squared weight difference; 0 if sizes differ.
NOTE(review): R is passed by value with no copy constructor — R's
  destructor frees the caller's arrays on return (see copy()); a const
  reference parameter needs a matching rnn.hh change.  TODO confirm.
*/
float RNN::comp(RNN R)
{
  int i;
  int j;
  float mx1, mx2;
  float err=0,errtmp;
  
  /* size check first: was previously done after dereferencing wp[0][0],
     which crashed when comparing empty networks */
  if (N != R.N) return(0);

  mx1 = fabs(wp[0][0] - wm[0][0]);
  mx2 = fabs(R.wp[0][0] - R.wm[0][0]);

  for (i=0;i<N;i++)
    for (j=0;j<N;j++) {
      if (fabs(wp[i][j] - wm[i][j]) > mx1) mx1 = fabs(wp[i][j]-wm[i][j]);
      if (fabs(R.wp[i][j] - R.wm[i][j]) > mx2) mx2 = fabs(R.wp[i][j]-R.wm[i][j]);
    }
  /* NOTE(review): mx1/mx2 can be 0 when wp == wm everywhere, giving a
     division by zero below — confirm whether that case can occur. */
  for (i=0;i<N;i++)
    for (j=0;j<N;j++) {
      errtmp=(wp[i][j] - wm[i][j])/mx1 - (R.wp[i][j] - R.wm[i][j]) / mx2;
      err += errtmp*errtmp;
    }
  return err;
}

/* 
Function: init
Purpose: Initialize a previously undefined RNN 
Arguments: 
  ip: number of input neurons
  hn: number of hidden neurons
  op: number of output neurons
  ETA: learning rate
  RATE: output rate for output neurons
  rnntype: architecture type for RNN
*/
void RNN::init(int ip, int hn, int op, float ETA, float RATE, u_char rnntype)
{
  /* thin wrapper: all work is done by new_rnn(); does not free any
     storage a previous init() may have allocated */
  new_rnn(ip,hn,op,ETA,RATE,rnntype);
}

/* 
Function: setETA
Purpose: Set the value of the learning rate 
Arguments:
  e: new learning rate
*/
void RNN::setETA(float e) { eta = e; }  /* takes effect on the next train() call */

/* 
Function: SNR
Purpose: Compute PSNR of difference between current network o/p and 
    defined "good" output (network's GOP) 
Arguments: none, assumes RNN output has been computed and the desired o/p has 
    been set
*/
float RNN::SNR ()
{  
  int k;
  float err=0;
  /* sum-squared error between desired (GOP) and actual output neurons;
     output neurons occupy q[N-OP .. N-1] */
  for (k=0;k<OP;k++)
    err += (GOP[k] - q[N - OP + k]) * (GOP[k] - q[N - OP + k]);
  /* NOTE(review): err == 0 (perfect match) divides by zero here and
     yields +inf — confirm callers tolerate that */
  err = 10.0 * log10(OP / err);
  return(err);
}

/* 
Function: MSE
Purpose: Compute MSE of difference between current network o/p and 
    defined "good" output (network's GOP) 
Arguments: none, assumes RNN output has been computed and the desired o/p
    has been set
*/
/* Mean squared error between the desired outputs (GOP) and the current
   output-neuron values; assumes computeop() has already run. */
float RNN::MSE ()
{  
  int k;
  float sum = 0;
  float diff;
  for (k = 0; k < OP; k++) {
    diff = GOP[k] - q[N - OP + k];   /* output neurons live at the tail of q[] */
    sum += diff * diff;
  }
  return sum / (float)OP;
}

/* 
Function: display
Purpose: Display an RNN (weights and all)
Arguments: none
*/
/* 
Function: display
Purpose: dump the network to stdout — sizes, learning parameters, both
    weight matrices and the connection bitmap (one 0/1 per pair).
Arguments: none
*/
void RNN::display()
{
  int row, col;
  printf("Input: %3i\n",IP);
  printf("Hidden: %3i\n",HN);
  printf("Output: %3i\n",OP);
  printf("Total: %3i\n",N);
  printf("Eta: %8.5f\n",eta);
  printf("Rate: %8.5f\n",rate);
  printf("WP:\n");
  for (row = 0; row < N; row++) {
    for (col = 0; col < N; col++)
      printf("%8.5f  ",wp[row][col]);
    printf("\n");
  }
  printf("\n\n");
  printf("WM:\n");
  for (row = 0; row < N; row++) {
    for (col = 0; col < N; col++)
      printf("%8.5f  ",wm[row][col]);
    printf("\n");
  }
  printf("\n\n");
  printf("Connections:\n");
  for (row = 0; row < N; row++) {
    for (col = 0; col < N; col++)
      printf("%1i ",(C[row][col/8] & (1 << (col%8))) >> (col%8));
    printf("\n");
  }
  printf("\n\n");
}

/* 
Function: train
Purpose: Adjust weights so that the network o/p resembles GOP 
Arguments: None, assumes that the input values and desired O/P values 
    have been set
*/
/* 
Function: train
Purpose: one training step — compute the current outputs, then run the
    matrix-inverse and weight-update stages matching the topology.
Arguments: None; assumes inputs and desired outputs have been set.
*/
void RNN::train()
{
  computeop();
  if (RNNTYPE != 0) {
    /* general (recurrent / free-form) topology */
    gen_inv();
    gen_computews();
  }
  else {
    /* feed-forward fast path */
    ff_inv();
    ff_computews();
  }
}

/* 
Function: computeop
Purpose: Compute the network's o/p based on current i/p 
Arguments: None, assumes input values have been set
*/
/* Compute the network's outputs from the current inputs, dispatching to
   the feed-forward fast path or the general fixed-point solver. */
void RNN::computeop()
{
  if (RNNTYPE != 0)
    genop();
  else
    ffop();
}

/* 
Function: computeop
Purpose: Compute network's o/p quantized to the base 2 log of quant + 1.  
    i.e. quant of 255 gives 8 bit quantization 
Arguments:
  quant: amount of quantization
*/
/* Compute quantized outputs (steps of 1/quant; quant == 255 approximates
   8-bit values), dispatching on the topology type. */
void RNN::computeop(int quant)
{
  if (RNNTYPE != 0)
    genop(quant);
  else
    ffop(quant);
}

/* 
Function: randomweights
Purpose: Randomize weights, good for networks defining own connections 
Arguments: None, assumes network architecture is defined
*/
/* 
Function: randomweights
Purpose: give every existing connection small random wp/wm weights in
    [0, 0.1); useful after defining a custom topology with connect().
Arguments: None; assumes the connection bitmap C is set up.
*/
void RNN::randomweights()
{
  int from, to;
  for (from = 0; from < N; from++)
    for (to = 0; to < N; to++) {
      if (!(C[from][to/8] & (1 << (to%8))))
        continue;   /* no connection: leave the weight at zero */
      /* rand() is consumed in wm-then-wp order (kept for reproducibility) */
      wm[from][to] = (float)(rand() % 5000) / 50000.;
      wp[from][to] = (float)(rand() % 5000) / 50000.;
    }
}


/* 
Function: setinputs
Purpose: define inputs for a specific neuron
Arguments:
  i: neuron number
  LP: (lambda plus) external excitation input
  LM: (lambda minus) external inhibition input
*/

void RNN::setinput(int i, float LP, float LM)
{
  /* no bounds check: caller must keep 0 <= i < N */
  lp[i] = LP;
  lm[i] = LM;
}


/*
Function: setinputvector
Purpose: set a vector of inputs at a time
Arguments: 
  n: number of elements in vector
  LP: vector of excitation inputs
  LM: vector of inhibition inputs
*/

void RNN::setinputvector(int n, float *LP, float *LM)
{
  /* bulk copy of the first n excitation/inhibition inputs;
     no bounds check — caller must keep n <= N */
  memcpy(lp,LP,n*sizeof(float));
  memcpy(lm,LM,n*sizeof(float));
}

/* 
Function: getop
Purpose: Get output value of i-th output neuron 
Arguments: 
  i: which output neuron to "probe" (starts at 0)
Output: value of i-th output neuron
*/
float RNN::getop(int i) 
{
  /* output neurons occupy the tail of q[]: indices IP+HN .. N-1 */
  return q[i+IP+HN];
}

/* 
Function: getop2
Purpose: Get output value of i-th neuron.  Neurons stored in 
    [i/p hidden o/p] order 
Arguments:
  i: which neuron to probe (between 0 and Number of I/P + Hidden + O/P  
       neurons)
OutpusL value of i-th neuron
*/
float RNN::getop2(int i)
{
  /* raw access to any neuron's output, i in [0, N) over [input hidden output] */
  return q[i];
}

/* 
Function: setGOP
Purpose: Set the desired output of the i-th output neuron 
Arguments:
  i: which o/p neuron to change the desired o/p of
  val: desired value for the i-th neuron
*/
void RNN::setGOP(int i, float val)
{
  /* i indexes output neurons from 0; no bounds check (caller keeps i < OP) */
  GOP[i] = val;
}

/* 
Function: setGOPvector
Purpose: Set the desired output of the first n output neurons 
Arguments: 
  n: number of output neurons to change the value of
  val: vector of neuron values
*/
void RNN::setGOPvector(int n, float *val)
{
  /* bulk copy of desired outputs; no bounds check — caller keeps n <= OP
     (GOP is allocated with N entries but only the first OP are used) */
  memcpy(GOP,val,n*sizeof(float));
}

/* 
Function: connect
Purpose: Make a connection between two neurons 
Arguments:
  i: "from" neuron
  j: "to" neuron
*/
/* 
Function: connect
Purpose: Make a connection between two neurons 
Arguments:
  i: "from" neuron
  j: "to" neuron
Returns: 0 on success, -1 for a self-connection, -2 if out of range
*/
int RNN::connect(int i, int j)
{
  if (i == j) return -1;                 /* self-connections are not allowed */
  if (i < 0 || j < 0 || i >= N || j >= N)
    return -2;                           /* was unchecked for negative indices */
  C[i][j/8] |= (1 << (j%8));             /* set bit j of row i in the bitmap */
  return 0;
}

/* 
Function: save
Purpose: Save RNN to file named fn 
Arguments:
  fn: file name to save rnn to
*/
void RNN::save(char *fn)
{
  FILE *f;
  int k,j;
  if ((f = fopen(fn,"wb")) == NULL)
    {
      printf("Cannot open output file!\n");
      exit(-1);   /* NOTE(review): exits the whole process from library code */
    }
  /* header: topology type, layer sizes, learning parameters
     (written in native byte order — see load_strange for cross-endian) */
  fwrite(&RNNTYPE, sizeof(u_char), 1, f);
  fwrite(&IP, sizeof(int), 1, f);
  fwrite(&HN, sizeof(int), 1, f);
  fwrite(&OP, sizeof(int),  1, f);
  fwrite(&eta, sizeof(float), 1, f);
  fwrite(&rate, sizeof(float), 1, f);
  /* connection bitmap: one packed row of (N+7)/8 bytes per neuron */
  for (k=0;k<N;k++)
    fwrite(C[k], sizeof(u_char), (int)((N+7)/8.0), f);
  /* weights: wp,wm pairs only for connections that exist */
  for (k=0;k<N;k++)
    for (j=0;j<N;j++) 
      if (C[k][j/8] & 1 << (j%8)) {
	fwrite(&(wp[k][j]), sizeof(float), 1, f);
	fwrite(&(wm[k][j]), sizeof(float), 1, f);
      }
  fclose(f);
}

/* Internal Function */
/* Internal Function
   strange_float: return x with its byte order reversed (endianness swap
   for files exchanged between big- and little-endian machines).
   Generalized from a hard-coded 4-byte swap to sizeof(float); applying
   it twice yields the original value. */
float strange_float(float x)
{
  float t;
  unsigned char *src = (unsigned char *)(& x);
  unsigned char *dst = (unsigned char *)(& t);
  unsigned i;
  for (i = 0; i < sizeof(float); i++)
    dst[i] = src[sizeof(float) - 1 - i];
  return t;
}

/* Internal Function */
/* Internal Function
   strange_int: return x with its byte order reversed (endianness swap).
   Generalized from a hard-coded 4-byte swap to sizeof(int); applying it
   twice yields the original value. */
int strange_int(int x)
{
  int t;
  unsigned char *src = (unsigned char *)(& x);
  unsigned char *dst = (unsigned char *)(& t);
  unsigned i;
  for (i = 0; i < sizeof(int); i++)
    dst[i] = src[sizeof(int) - 1 - i];
  return t;
}

/* 
Function: load_strange
Purpose: Load RNN from file named fn, reverse endian-ness of stored values
    useful for conversion programs between PC's and workstations
Arguments:
  fn: file name to load from
Notes: I really need to re-write the I/O to be generic to all machines
*/
int RNN::load_strange(char *fn)
{
  FILE *f;
  int i,j,k;
  float l,m;
  u_char rnntype;
  
  if ((f = fopen(fn,"rb")) == NULL) return(0);
  /* header (same layout as save()), byte-swapping every multi-byte field;
     NOTE(review): fread return values are unchecked — a truncated file
     yields garbage parameters rather than an error */
  fread(&rnntype, sizeof(rnntype), 1, f);   /* single byte: no swap needed */
  fread(&i, sizeof(i), 1, f);               /* IP */
  i = strange_int(i);
  fread(&j, sizeof(j), 1, f);               /* HN */
  j = strange_int(j);
  fread(&k, sizeof(k), 1, f);               /* OP */
  k = strange_int(k);
  fread(&l, sizeof(float), 1, f);           /* eta */
  l = strange_float(l);
  fread(&m, sizeof(float), 1, f);           /* rate */
  m = strange_float(m);
  new_rnn(i,j,k,l,m,rnntype);
  /* connection bitmap rows are single bytes: no swapping required */
  for (k=0;k<N;k++)
    fread(C[k], sizeof(u_char), (int)((N+7)/8.0), f);
  /* wp,wm pairs stored only for existing connections, each byte-swapped */
  for (k=0;k<N;k++)
    for (j=0;j<N;j++) 
      if (C[k][j/8] & 1 << (j%8)) {
	fread(&(wp[k][j]), sizeof(float), 1, f);
	fread(&(wm[k][j]), sizeof(float), 1, f);
	wp[k][j] = strange_float(wp[k][j]);
	wm[k][j] = strange_float(wm[k][j]);
      }
  fclose(f);
  return(1);
}


/*
Function: load
Purpose: Load RNN from file named fn 
Arguments:
  fn: file name to load from
*/
int RNN::load(char *fn)
{
  FILE *f;
  int i,j,k;
  float l,m;
  u_char rnntype;
  
  if ((f = fopen(fn,"rb")) == NULL) return(0);
  /* header in native byte order (mirror of save());
     NOTE(review): fread return values are unchecked — a truncated file
     yields garbage parameters rather than an error */
  fread(&rnntype, sizeof(rnntype), 1, f);
  fread(&i, sizeof(i), 1, f);        /* IP */
  fread(&j, sizeof(j), 1, f);        /* HN */
  fread(&k, sizeof(k), 1, f);        /* OP */
  fread(&l, sizeof(float), 1, f);    /* eta */
  fread(&m, sizeof(float), 1, f);    /* rate */
  new_rnn(i,j,k,l,m,rnntype);
  /* connection bitmap, then wp,wm pairs for existing connections only */
  for (k=0;k<N;k++)
    fread(C[k], sizeof(u_char), (int)((N+7)/8.0), f);
  for (k=0;k<N;k++)
    for (j=0;j<N;j++) 
      if (C[k][j/8] & 1 << (j%8)) {
	fread(&(wp[k][j]), sizeof(float), 1, f);
	fread(&(wm[k][j]), sizeof(float), 1, f);
      }
  fclose(f);
  return(1);
}

/* 
Function: dispop
Purpose: Display desired and current o/p of output neurons 
Arguments: none
*/
/* 
Function: dispop
Purpose: print the desired outputs (GOP), a colon, then the current
    output-neuron values, all on one line.
Arguments: none
*/
void RNN::dispop()
{
  int k;
  for (k = 0; k < OP; k++)
    printf("%8.5f  ",GOP[k]);
  printf(": ");
  for (k = 0; k < OP; k++)
    printf("%8.5f  ",q[IP + HN + k]);   /* output neurons sit after IP+HN */
  printf("\n");
}

/* 
Function: dispop2
Purpose: Display positive input to i/p neurons and o/p of o/p neurons 
Arguments: none
*/
/* 
Function: dispop2
Purpose: print the excitation inputs of the input neurons, a colon, then
    the output values of every neuron, all on one line.
Arguments: none
*/
void RNN::dispop2()
{
  int k;
  for (k = 0; k < IP; k++)
    printf("%8.5f  ",lp[k]);
  printf(": ");
  for (k = 0; k < N; k++)
    printf("%8.5f  ",q[k]);
  printf("\n");
}

/* 
Function: decompress
Purpose: Compute o/p of o/p neurons if hidden neurons are set to vector h 
Arguments: 
  h: floating point vector of values for hidden neurons 
*/
/* 
Function: decompress
Purpose: force the hidden layer to the values in h, then run one forward
    step from hidden to output neurons (feed-forward layout assumed).
    Updates the member arrays r, num, den and q for the output neurons.
Arguments: 
  h: float vector of HN values for the hidden neurons
*/
void RNN::decompress(float *h) {
  int k, o;
  /* load the hidden layer directly from h */
  for (k = 0; k < HN; k++)
    q[IP + k] = h[k];
  /* evaluate each output neuron from the hidden layer only */
  for (o = IP + HN; o < N; o++) {
    r[o] = rate;        /* output neurons fire at the fixed rate */
    den[o] = r[o];
    num[o] = 0;
    for (k = IP; k < IP + HN; k++) {
      num[o] += q[k] * wp[k][o];
      den[o] += q[k] * wm[k][o];
    }
    q[o] = num[o] / den[o];
    if (q[o] > 1) q[o] = 1;   /* outputs are probabilities */
  }
}  

/*
Function: getArch
Purpose: return the architecture of the RNN
Arguments:
  s: integer vector to store number of I/P, Hidden and O/P neurons in
*/
void RNN::getArch(int *s) {
  /* s must have room for at least 3 ints */
  s[0] = IP;   /* input neurons */
  s[1] = HN;   /* hidden neurons */
  s[2] = OP;   /* output neurons */
}