ROBOTC 1.37 BETA 1 Release 
Site Admin

Joined: Wed Jan 24, 2007 10:42 am
Posts: 614
ROBOTC 1.37 BETA 1 Release
http://robotc.net/downloads/ROBOTCforMi ... BETA_1.exe

We're shooting for Friday for the 1.40 release, so please let us know if you run into any issues ASAP! Thanks!

_________________
Timothy Friez
ROBOTC Developer - SW Engineer
tfriez@robotc.net


Last edited by tfriez on Fri Jun 06, 2008 4:10 pm, edited 1 time in total.



Wed Jun 04, 2008 4:36 pm
Guru

Joined: Sat Mar 01, 2008 12:52 pm
Posts: 1030
Is there still the same compiler error (ERROR Internal...) as before?
Here's to you... :wink:

[EDIT:] YES, IT IS!
Code:
// Trainable neural network
// feed-forward net with 3 sensor inputs (touch sensors at S1, S2, S3)
// and 2 output neurons (shown on the display)
// (c) H. W. 2008
// new: groundwork for multi-layer nets & backpropagation
   string Version="0.412";

#define printXY nxtDisplayStringAt
#define println nxtDisplayTextLine


//**********************************************************************
// Basic declarations for neural networks
//**********************************************************************


const int nl0 =  2;    // max. neurons in layer 0
const int nl1 =  1;    // max. neurons in layer 1
const int nl2 =  1;    // max. neurons in layer 2
const int nl3 =  1;    // max. neurons in layer 3


const int ni = 3;      // max. dendrite inputs per neuron (indexed from 0)
float lbd    = 0.2;    // learning rate / factor lambda

int key;               // currently pressed NXT button
string MenuText="";    // menu control

float sollOut=0;       // target (desired) output


//**********************************************************************
// Neuron structure (simplified version)
//**********************************************************************

typedef struct{
   float in[ni];    // individual inputs (dendrites)
   float w[ni];     // individual weights (one per dendrite)
   float net;       // total net input
   float th;        // threshold
   float d;         // delta = error signal
   float out;       // output (axon): e.g. 0 or 1
} tNeuron;

//**********************************************************************

tNeuron Neuron0[nl0];  // neuron layer 0
tNeuron Neuron1[nl1];  // neuron layer 1
tNeuron Neuron2[nl2];  // neuron layer 2
tNeuron Neuron3[nl3];  // neuron layer 3


//**********************************************************************
//  mathematical helper functions
//**********************************************************************


float tanh(float x)  // hyperbolic tangent
{
   float e2x;
   e2x=exp(2*x);
   return((e2x-1)/(e2x+1));
}

//**********************************************************************
// Input/output functions (buttons, display)
//**********************************************************************

int buttonPressed(){

  TButtons nBtn;
  nNxtExitClicks=4;         // guard against accidentally pressing the Exit button

  nBtn = nNxtButtonPressed; // check for button press
  switch (nBtn) {
     case kLeftButton:  return 1;
     case kEnterButton: return 2;
     case kRightButton: return 3;
     case kExitButton:  return 4;
     default:           return 0;
  }
  return 0;
}

//*****************************************

int getkey() {
   int k, buf;

   k=buttonPressed();
   buf=buttonPressed();
  while (buf!=0)
  { buf=buttonPressed(); }
  return k;
}
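
// getkey() returns the first sampled button value and then keeps polling
// until the button has been released, so a single press is reported only once.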

//**********************************************************************

task DisplayValues(){
  int i;  // inputs = sensors
  int j;  // neuron number = outputs
   while(true) {

    printXY( 0, 63, "IN:");
                             printXY(48, 55, "|");
                             printXY(48, 47, "|");
    printXY( 0, 39, "th=");  printXY(48, 39, "|");
    printXY( 0, 31, "OUT");  printXY(48, 31, "|");




     for (j=0;j<=1;j++) {
         printXY(15, 63, "%2.0f", Neuron0[j].in[0]);
         printXY(26, 63, "%2.0f", Neuron0[j].in[1]);
         printXY(37, 63, "%2.0f", Neuron0[j].in[2]);

         printXY(00+(j*53), 55, "%3.1f", Neuron0[j].w[0]);
         printXY(12+(j*53), 47, "%3.1f", Neuron0[j].w[1]);
         printXY(24+(j*53), 55, "%3.1f", Neuron0[j].w[2]);

         printXY(25+(j*45), 39, "%3.1f", Neuron0[j].th);

         printXY(25+(j*45), 31, "%2.0f", Neuron0[j].out);
    }

    // menu line for button control

    println(7, "%s", MenuText);


  }
  return;
}

//**********************************************************************

void Pause() {
   while(true) wait1Msec(50);
}


//**********************************************************************
// File I/O
//**********************************************************************
const string sFileName = "Memory.dat";

TFileIOResult nIoResult;
TFileHandle   fHandle;

int   nFileSize     = (nl0+nl1+nl2+nl3+1)*100;


void SaveMemory()
{
   int i, j;

   CloseAllHandles(nIoResult);
   wait1Msec(500);
   PlaySound(soundBeepBeep);
   wait1Msec(11);

   Delete(sFileName, nIoResult);

  OpenWrite(fHandle, nIoResult, sFileName, nFileSize);
  if (nIoResult==0) {
    eraseDisplay();

    for (j=0;j<nl0;j++)
    {
      for (i=0; i<ni;i++) {
         WriteFloat (fHandle, nIoResult, Neuron0[j].w[i]);
      }
      WriteFloat (fHandle, nIoResult, Neuron0[j].th);

    }


    Close(fHandle, nIoResult);
    if (nIoResult==0) PlaySound(soundUpwardTones);
    else PlaySound(soundException);
  }
  else PlaySound(soundDownwardTones);

}
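
// Memory.dat layout: for each of the nl0 layer-0 neurons, ni weight floats
// followed by one threshold float; RecallMemory() reads them back in the same order.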

//*****************************************

void RecallMemory()
{
  int i, j;
   CloseAllHandles(nIoResult);
   wait1Msec(500);
   PlaySound(soundBeepBeep);
   wait1Msec(11);

   OpenRead(fHandle, nIoResult, sFileName, nFileSize);
  if (nIoResult==0) {

    j=0;
    for (j=0;j<nl0;j++)
    {
      for (i=0; i<ni;i++) {
         ReadFloat (fHandle, nIoResult, Neuron0[j].w[i]);
      }
      ReadFloat (fHandle, nIoResult, Neuron0[j].th);

    }

    Close(fHandle, nIoResult);
    if (nIoResult==0) PlaySound(soundUpwardTones);
    else PlaySound(soundException);
  }
  else PlaySound(soundDownwardTones);


  eraseDisplay();

}


//**********************************************************************
// Functions of the neural network
//**********************************************************************
//**********************************************************************
// Propagation functions: accumulate the weighted inputs (in -> net)
//**********************************************************************
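// Both propagation functions form the usual weighted-sum net input,
//    net = in[0]*w[0] + in[1]*w[1] + ... + in[ni-1]*w[ni-1],
// and netPropagThr() additionally subtracts the threshold th from that sum.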

void netPropag(tNeuron &neur){      // propagation function 1:
  int i=0;                          // computes the total input (net)
  float s=0;

  for(i=0;i<ni;i++){
     s+= (neur.in[i]*neur.w[i]);     // weighted sum
  }
  neur.net=s;
}

void netPropagThr(tNeuron &neur){   // propagation function 2:
  int i=0;                          // computes the total input (net)
  float s=0;                        // and takes the threshold into account

  for(i=0;i<ni;i++){
     s+= (neur.in[i]*neur.w[i]);     // weighted sum
  }
  neur.net=s-neur.th;               // minus the threshold
}

//**********************************************************************
// Activation functions including output (net -> act -> out)
//**********************************************************************


void act_01(tNeuron &neur){         // activation function 1, T1: x -> [0; +1]
   if (neur.net>=0)                  // 0-1 threshold function
      {neur.out=1;}                  // function value: 0 or 1
   else {neur.out=0;}
}

void actIdent(tNeuron &neur){       // activation function 2, T2: x -> x
   neur.out=neur.net;                // identity function
}                                   // function value: identity


void actFermi(tNeuron &neur){       // activation function 3, T3: x -> [0; +1]
   float val;                        // Fermi (logistic) function, differentiable
   float c=3.0;                      // c = steepness; c=1: shallow,
  val= (1/(1+(exp(-c*neur.net))));  // c=10: step between x in [-0.1; +0.1]
  neur.out=val;
}

void actTanH(tNeuron &neur){        // activation function 4, T4: x -> [-1; +1]
   float val;                        // hyperbolic tangent, differentiable
   float c=2.0;                      // c = steepness; c=1: shallow
  val= tanh(c*neur.net);            // c=3: step between x in [-0.1; +0.1]
  neur.out=val;
}



//**********************************************************************
// Reset / Init
//**********************************************************************

void ResetNeuron(tNeuron &neur){ // reset everything to zero
   int i;

   for (i=0; i<ni; i++) {
      neur.in[i]=0;      // individual input (dendrite)
      neur.w[i]=0;       // individual weight (dendrite)
   }
   neur.net=0;          // total input
   neur.th=0;           // threshold
   neur.out=0;          // computed activation value = output
}

//*****************************************

void InitAllNeurons(){              // reset all neurons of the net to zero
   int j;

  for (j=0; j<nl0; j++) {           // neuron layer 0
        ResetNeuron(Neuron0[j]);}

  for (j=0; j<nl1; j++) {           // neuron layer 1
        ResetNeuron(Neuron1[j]);}

  for (j=0; j<nl2; j++) {           // neuron layer 2
        ResetNeuron(Neuron2[j]);}

  for (j=0; j<nl3; j++) {           // neuron layer 3
        ResetNeuron(Neuron3[j]);}
}

//*****************************************


void InitThisNeuralNet()
{
  ; // defaults
}


void PrepThisNeuralNet()  // for testing
{
   ; // defaults
}


//**********************************************************************
// Inputs
//**********************************************************************

task RefreshInputLayer(){  // inputs must be sampled very quickly, hence a task of their own
int i, j;
  while(true){
  for (j=0; j<nl0; j++) {
    for (i=0; i<ni; i++)   {
      Neuron0[j].in[i]=(float)SensorValue(i); // input 0: touch sensor at S1=0
      }
    }
  }
  return;
}

//*****************************************

void SetInputPattern(int m, int n, int o)
{
   int j;
   for (j=0; j<nl0;j++)
  {
     Neuron0[j].in[0]=(float)m;
     Neuron0[j].in[1]=(float)n;
     Neuron0[j].in[2]=(float)o;
   }
}

//**********************************************************************
// evaluate the individual neurons layer by layer
//**********************************************************************

task RefreshLayers(){
  int j;
  while(true){
    for (j=0;j<nl0;j++) {
       netPropagThr(Neuron0[j]);  // weighted sum of layer 0, minus threshold
      act_01(Neuron0[j]);        // activation via the 0-1 threshold function
    }
  }
  return;
}
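
// Note: only layer 0 is evaluated here; Neuron1..Neuron3 are declared as
// groundwork for the planned multi-layer / backpropagation version.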

//**********************************************************************
// Learning procedure
//**********************************************************************


void LearnPerceptronRule() {         // learning mode using the delta rule
  int ErrorCount;
  int m,n,o;  // sensor combinations
  int i;  // inputs

  int j;  // output neuron index

 do {
  ErrorCount=0;
  PlaySound(soundBeepBeep);
  MenuText="-- <<  ok  >> ++";

  for (m=0; m<2; m++)    {
    for (n=0; n<2; n++)   {
     for (o=0; o<2; o++)   {
     SetInputPattern(m,n,o);           // present a virtual input pattern
     wait1Msec(200);

     for (j=0;j<2;j++)
     {

       sollOut=Neuron0[j].out;   // 0
       MenuText="-- <<  ok  >> ++";
       printXY(0,23, "soll:");
       printXY(25,23,"%2.0f", sollOut);
      do                        // correct the generated output
      {
         key=getkey();

         if (key==1) {   if (sollOut>0) sollOut-=1;  }
         else
         if (key==3) { if (sollOut< 1) sollOut+=1;  }
        printXY(0,23, "soll:");
         printXY(25,23,"%2.0f", sollOut);
        wait1Msec(100);
      } while ((key!=2)&&(key!=4));

      println(5, " ");

      //...................................................
      if (key==4) {                     // learning mode END
         PlaySound(soundException);
         key=0;
         return;
      }  // if key
      //....................................................

                                       // learning mode START

      if (sollOut==Neuron0[j].out)
        {
            PlaySound(soundBlip);         // teachOut correct
         PlaySound(soundBlip);
         wait1Msec(100);
      }  //
        else
        {                                // teachOut wrong
           PlaySound(soundException);
           wait1Msec(100);
        ErrorCount+=1;


           if (sollOut!=Neuron0[j].out)
           {
          for (i=0; i<ni; i++)          // for all inputs i
              {                             // adjust the weights (delta rule)
                 Neuron0[j].w[i] = Neuron0[j].w[i]+ (lbd*Neuron0[j].in[i]*(sollOut-Neuron0[j].out));
              }
           } //
           if (sollOut!=Neuron0[j].out)    // adjust the threshold
           {
              Neuron0[j].th = Neuron0[j].th - (lbd*(sollOut-Neuron0[j].out));
           } //
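
           // In short, the perceptron / delta rule applied above:
           //    w[i] <- w[i] + lbd * in[i] * (sollOut - out)
           //    th   <- th   - lbd * (sollOut - out)
           // and the outer pattern loop repeats until a full pass yields ErrorCount==0.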

      }  // else

    }  // for j

    }  // for o
   }  // for n
  }  // for m
 } while (ErrorCount>0);

PlaySound(soundUpwardTones);
PlaySound(soundUpwardTones);
}

//**********************************************************************
// Program flow control, menus
//**********************************************************************

int Menu_Recall() {
  eraseDisplay();
  MenuText="<Recall    Clear>";
  println(7, "%s", MenuText);
  println(0, "%s", " Hal "+Version);
  println(1, "%s", "----------------");
  println(2, "%s", "Reload my brain -");
  println(4, "%s", " Total Recall ?");
  do
  {
     key=getkey();
     if (key==1)    {  return 1;   }
     if (key==2)    {  PlaySound(soundException);   }
     if (key==3)    {  return 3;   }
     if (key==4)    {  PlaySound(soundException); }

     wait1Msec(100);
  }
  while ((key==0)||(key==2)||(key==4));
}



int Menu_LearnSaveRun() {
  eraseDisplay();
  MenuText="<Learn  Sav  Run>";
  do
  {
     key=getkey();
     if (key==1)    {  return 1;   }
     if (key==2)    {  SaveMemory(); }
     if (key==3)    {  return 3;   }
     if (key==4)    {  PlaySound(soundException); }

     wait1Msec(100);
  }
  while ((key==0)||(key==2)||(key==4));
}

//**********************************************************************
// Main program
//**********************************************************************
int choice;


task main(){
  SensorType(S1)=sensorTouch;
  SensorType(S2)=sensorTouch;
  SensorType(S3)=sensorTouch;

  InitAllNeurons();
  InitThisNeuralNet();

  choice=Menu_Recall();
  if (choice==1)  { RecallMemory(); } // load the old memory

  StartTask (DisplayValues);
  StartTask (RefreshLayers);

  while(true)
  {
    choice=Menu_LearnSaveRun();
    if (choice==1)
    {
       StopTask(RefreshInputLayer);
       LearnPerceptronRule();          // learning mode
    }
    MenuText="Menue: [ESC]";
    PlaySound(soundFastUpwardTones);
    StartTask (RefreshInputLayer);    // run mode
    do
    {
       key=getkey();
      wait1Msec(100);
    } while (key!=4);
  }

}


_________________
regards,
HaWe aka Ford
#define S sqrt(t+2*i*i)<2
#define F(a,b) for(a=0;a<b;++a)
float x,y,r,i,s,j,t,n;task main(){F(y,64){F(x,99){r=i=t=0;s=x/33-2;j=y/32-1;F(n,50&S){t=r*r-i*i;i=2*r*i+j;r=t+s;}if(S){PutPixel(x,y);}}}while(1)}


Last edited by Ford Prefect on Thu Jun 05, 2008 2:16 pm, edited 6 times in total.



Thu Jun 05, 2008 4:18 am
Moderator

Joined: Wed Mar 05, 2008 8:14 am
Posts: 3293
Location: Rotterdam, The Netherlands
Ford Prefect, the compiler killer.

_________________
| Professional Conduit of Reasonableness
| (Title bestowed on the 8th day of November, 2013)
| My Blog: I'd Rather Be Building Robots
| ROBOTC 3rd Party Driver Suite: [Project Page]


Thu Jun 05, 2008 4:26 am
Guru

Joined: Sat Mar 01, 2008 12:52 pm
Posts: 1030
No, the compiler kills ME!

Code:
for (j=0;j<=1;j++) {
         printXY(15, 63, "%2.0f", Neuron0[j].in[0]);
         printXY(26, 63, "%2.0f", Neuron0[j].in[1]);
         printXY(37, 63, "%2.0f", Neuron0[j].in[2]);

         printXY(00+(j*53), 55, "%3.1f", Neuron0[j].w[0]);
         printXY(12+(j*53), 47, "%3.1f", Neuron0[j].w[1]);
         printXY(24+(j*53), 55, "%3.1f", Neuron0[j].w[2]);

         printXY(25+(j*45), 39, "%3.1f", Neuron0[j].th);

         printXY(25+(j*45), 31, "%2.0f", Neuron0[j].out);
    }         


**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 0/-131Pass/Seq: Emit Code:20
**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 0/-131Pass/Seq: Emit Code:22
**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 1/-131Pass/Seq: Emit Code:26
**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 1/-131Pass/Seq: Emit Code:30
**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 1/-131Pass/Seq: Emit Code:34
**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 1/-131Pass/Seq: Emit Code:38
**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 1/-131Pass/Seq: Emit Code:42
**Error**:Internal. Bad temp index in releasing temporary. 30(short). Allocation Index 1/-136Pass/Seq: Emit Code:50
**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 0/-136Pass/Seq: Emit Code:54
**Error**:Internal. Bad temp index in releasing temporary. 30(short). Allocation Index 1/-141Pass/Seq: Emit Code:64
**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 0/-141Pass/Seq: Emit Code:68
**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 0/-223Pass/Seq: Emit Code:180
**Error**:Internal. Bad temp index in releasing temporary. 30(float). Allocation Index 0/-223Pass/Seq: Emit Code:182
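
One untested idea, in case it helps narrow this down: the errors all mention temporaries for the indexed struct members, so copying the values into plain local floats before handing them to printXY might sidestep the temporary allocation. Just a sketch, not verified against this beta:
Code:
// possible workaround sketch (untested): copy struct members into locals first
float i0, i1, i2, w0, w1, w2, thr, outp;
for (j=0; j<=1; j++) {
   i0 = Neuron0[j].in[0];  i1 = Neuron0[j].in[1];  i2 = Neuron0[j].in[2];
   w0 = Neuron0[j].w[0];   w1 = Neuron0[j].w[1];   w2 = Neuron0[j].w[2];
   thr  = Neuron0[j].th;
   outp = Neuron0[j].out;

   printXY(15, 63, "%2.0f", i0);
   printXY(26, 63, "%2.0f", i1);
   printXY(37, 63, "%2.0f", i2);

   printXY(00+(j*53), 55, "%3.1f", w0);
   printXY(12+(j*53), 47, "%3.1f", w1);
   printXY(24+(j*53), 55, "%3.1f", w2);

   printXY(25+(j*45), 39, "%3.1f", thr);
   printXY(25+(j*45), 31, "%2.0f", outp);
}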


_________________
regards,
HaWe aka Ford
#define S sqrt(t+2*i*i)<2
#define F(a,b) for(a=0;a<b;++a)
float x,y,r,i,s,j,t,n;task main(){F(y,64){F(x,99){r=i=t=0;s=x/33-2;j=y/32-1;F(n,50&S){t=r*r-i*i;i=2*r*i+j;r=t+s;}if(S){PutPixel(x,y);}}}while(1)}


Last edited by Ford Prefect on Thu Jun 05, 2008 4:15 pm, edited 5 times in total.



Thu Jun 05, 2008 5:15 am