diff --git a/README b/README
index 4a2bab87175ee9a79aa663b17c380a2bee706207..bc0de12b966c7569610f97979c2145126cd8b392 100644
--- a/README
+++ b/README
@@ -1,27 +1,37 @@
-Deps:
+Deps
+----
 
-- gtk3 for python2 (package is python2-gobject in archlinux, as well as gtk3)
+- gtk3 for python2 (the python2-gobject package on Arch Linux)
 - liblo with python2 bindings (OSC library)
-- get and compile https://github.com/alumae/gst-kaldi-nnet2-online (which requires kaldi)
 
-Install:
+Install
+-------
 
-./download-models.sh
+1) ./download-models.sh
 
-copy libgstkaldionline2.so to ./asr/ or change GST_PLUGIN_PATH in main.py to point to its directory
+2) get and build the gst-kaldi-nnet2-online plugin from https://gitlab.lif.univ-mrs.fr/benoit.favre/gst-kaldi-nnet2-online-rocio
 
-Run:
+3) copy libgstkaldionline2.so to ./asr/ or change GST_PLUGIN_PATH in main.py to point to its directory
 
-- The main program:
+4) go to slu/src and build the SLU library with make (requires OpenFst)
+
+Run
+---
+
+1) The main program:
 ./start.sh
 
-- The osc server:
+2) The OSC server (optional):
 python2 osc.py
 
-Doc:
+Documentation
+-------------
+
+Some documentation for GTK development:
 developing with pygtk3: http://lazka.github.io/pgi-docs/, https://python-gtk-3-tutorial.readthedocs.org/en/latest/
 
-Todo:
+Todo
+----
 
 DONE configuration for osc
 DONE non intrusive animated scrolling
@@ -35,13 +45,13 @@ DONE click section = select that section
 DONE click action = perform action 
 DONE add thread for slu
 DONE remove section changer UI
+DONE change xml view to reflect already performed actions, already recognized text
 
 events = click action or words to resynchronize ?
 click line = synchronize to that line
 click action = synchronize to the next line
 insert timer in main ui, use it for logger
 add logger
-change xml view to reflect already performed actions, already recognized text
 move slu to asr
 make selector a proper window
 allow sequence advance in slu, add UI for that
diff --git a/actions.py b/actions.py
index 43a68495172dc17eb318586633459e6a791c3a78..ed7849b5b63ffbb32d4c9cb86cf6a15286725531 100644
--- a/actions.py
+++ b/actions.py
@@ -1,3 +1,4 @@
+import re
 import osc, log
 
 class Action:
@@ -33,6 +34,16 @@ def setup(confirmer, highlighter, logger=log.ConsoleLogger()):
 def perform_action(action, confirm=True, timeout=3):
     global manager
     manager.perform(action, confirm, timeout)
-        
 
+# expected action format, e.g. action(1,1,"#ENDSECTION(1)","")
+def parse_slu_action(text):
+    found = re.search(r'^action\((\d+),(\d+),"(([^"\\]|\\")*)","(([^"\\]|\\")*)"\)$', text)
+    if found:
+        section_id = int(found.group(1))
+        sequence_id = int(found.group(2))
+        action_name = found.group(3)
+        action_text = found.group(5)
+        return Action(action_name, section=section_id, sequence=sequence_id, words=action_text)
+    print "Warning: could not parse slu action '%s'" % text
+    return Action(text)
 
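A quick way to sanity-check the regex behind parse_slu_action is to run the same pattern against one of the action strings listed in slu/automate/homeostasis_25nov_dico_action.txt. The snippet below is a minimal, standalone sketch: it reuses the pattern directly instead of importing actions.py (so the osc/log dependencies are not needed), and the group numbering in the comments is read off the pattern itself, not output captured from the running system.

import re

# same pattern as actions.parse_slu_action
PATTERN = r'^action\((\d+),(\d+),"(([^"\\]|\\")*)","(([^"\\]|\\")*)"\)$'

# one entry from homeostasis_25nov_dico_action.txt, without the trailing id
line = 'action(2,2,"open_2B","open_technical_characteristics")'

match = re.search(PATTERN, line)
assert match is not None
print(match.group(1))  # prints: 2 (the section id)
print(match.group(2))  # prints: 2 (the sequence id)
print(match.group(3))  # prints: open_2B (the action name)
print(match.group(5))  # prints: open_technical_characteristics (the action words)
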
diff --git a/asr.py b/asr.py
index dc6a8c7b4a32cd8e37456b28098981abbaa3bed2..52a066961d1763baa35adfa1d15fa2e0be111dc2 100644
--- a/asr.py
+++ b/asr.py
@@ -120,7 +120,6 @@ class ASR(Gtk.HBox):
         self.button.set_sensitive(True)
 
     def _on_partial_result(self, asr, hyp):
-        print 'PARTIAL', self.hyp, hyp
         """Delete any previous selection, insert text and select it."""
         Gdk.threads_enter()
         if len(self.hyp) == 0:
@@ -129,19 +128,20 @@ class ASR(Gtk.HBox):
         if hyp != self.hyp[-1]:
             osc.client.send_words(len(self.hyp), hyp)
 
-        self.hyp[-1] = hyp
-        if self.partial_hyp_callback:
-            self.partial_hyp_callback(self.hyp)
+            self.hyp[-1] = hyp
+            #print 'PARTIAL', self.hyp
+            if self.partial_hyp_callback:
+                self.partial_hyp_callback(self.hyp)
 
-        hyp += '...'
-        self.insert = self.buffer.get_iter_at_line(self.buffer.get_line_count() - 1)
-        self.buffer.delete(self.insert, self.buffer.get_end_iter())
-        self.buffer.insert(self.insert, hyp)
+            hyp += '...'
+            self.insert = self.buffer.get_iter_at_line(self.buffer.get_line_count() - 1)
+            self.buffer.delete(self.insert, self.buffer.get_end_iter())
+            self.buffer.insert(self.insert, hyp)
 
         Gdk.threads_leave()
                 
     def _on_final_result(self, asr, hyp):
-        print 'FINAL', self.hyp, hyp
+        #print 'FINAL', self.hyp, hyp
         Gdk.threads_enter()
         if len(self.hyp) == 0:
             self.hyp = ['']
diff --git a/data/style.css b/data/style.css
index dca8aa2b8882af2ac60c9bcb87b1582a71239d20..90f6e476732d4a49fe91a4e3e528415820087285 100644
--- a/data/style.css
+++ b/data/style.css
@@ -5,6 +5,9 @@
 
 .text-line {
 }
+.keyword-highlighted {
+    color: red;
+}
 .keyword {
 }
 .text {
@@ -44,7 +47,7 @@
 }
 
 .confirm {
-    font: bold 14;
+    font: bold 20;
     background: #ff9999;
 }
 
diff --git a/main.py b/main.py
index 98c5592435da9d54ea81f56af1b2f681a52e81d3..5f1e76b5550ed963bd9b1e6818e179ee5eb5507b 100644
--- a/main.py
+++ b/main.py
@@ -39,8 +39,6 @@ class ScriptedASR(Gtk.Window):
 
         self.xmlview = xmlview.XmlView(xml_filename)
         vbox.pack_start(self.xmlview, True, True, 5)
-        self.lines = [x for x in self.xmlview.get_line_iterator()]
-        self.current_line = -1
 
         self.confirmer = confirm.ConfirmationBox()
         vbox.pack_start(self.confirmer, False, True, 5)
@@ -50,13 +48,16 @@ class ScriptedASR(Gtk.Window):
         vbox.pack_start(self.asr, False, True, 5)
 
         # slu
+        #prefix = 'slu/automate/homeostasis_25nov_%s'
+        #library = 'slu/src.new/librocio_slu.so'
         prefix = 'slu/automate/homeostasis_25nov_%s'
+        library = 'slu/src/librocio_slu.so'
         self.slu = {}
         for section_fst in glob.glob(prefix % 'section*.fst'):
             found = re.search('section(\d+)\.fst$', section_fst)
             if found:
                 section_id = int(found.group(1))
-                self.slu[section_id - 1] = slu.SLU(prefix % 'dico_word.txt', prefix % 'dico_action.txt', section_fst, prefix % 'clean_tail.fst')
+                self.slu[section_id - 1] = slu.SLU(prefix % 'dico_word.txt', prefix % 'dico_action.txt', section_fst, prefix % 'clean_tail.fst', library=library)
 
         self.add(vbox)
         self.show_all()
@@ -71,49 +72,37 @@ class ScriptedASR(Gtk.Window):
         # setup singletons 
         osc.setup(osc_host, osc_port)
         actions.setup(self.confirmer, self.xmlview)
+        self.current_section = 0
 
-    def line_clicked(self, widget, event):
-        if self.current_line >= 0:
-            self.lines[self.current_line].highlight(False)
-        for i, line in enumerate(self.lines):
-            if widget is line:
-                self.current_line = i
-                self.lines[self.current_line].highlight(True)
+    def set_section(self, section_id):
+        self.xmlview.set_section(section_id)
 
     def slu_finished(self, model, slu_output):
         for action_id in range(self.previous_actions, model.num_actions()):
-            action = model.get_action(action_id)
-            actions.perform_action(actions.Action(action))
+            action = actions.parse_slu_action(model.get_action(action_id))
+            print action.text
+            if action.text.startswith('#ENDSEQUENCE('):
+                pass
+            elif action.text.startswith('#ENDSECTION('):
+                new_section = self.xmlview.get_section() + 1
+                self.confirmer.confirm('Go to section %d?' % (new_section + 1), 3, lambda: self.set_section(new_section))
+            else:
+                self.xmlview.highlight(action)
+                actions.perform_action(action, False)
 
     def hyp_changed(self, hyp):
         #hyp = ' '.join(hyp).replace('[noise]', ' ').split()
         words = hyp[-1].strip().replace('_', ' ').split()
         section_id = self.xmlview.get_section()
-        print section_id
+        if self.current_section != section_id:
+            self.previous_actions = 0
+            self.current_section = section_id
+        #print section_id
         if section_id in self.slu:
             model = self.slu[section_id]
             self.previous_actions = model.num_actions()
             model.process(words, self.slu_finished)
 
-        #if self.current_line >= len(self.lines) - 1:
-        #    print "FINISHED"
-        #    return
-        #line = self.lines[self.current_line + 1].text.split()
-        #import levenstein
-        #num_errors, num_ref, alignment, score = levenstein.align(line, words)
-        #num_matches = 0
-        #for ref_word, hyp_word in alignment:
-        #    if ref_word == hyp_word and ref_word != None:
-        #        num_matches += 1
-        #score = float(num_matches) / max(len(line), len(words))
-        #print 'ASR:', hyp[-1], 'REF:', self.lines[self.current_line + 1].text, 'score:', score
-        #levenstein.print_alignment(alignment)
-        #if score >= 0.5:
-        #    if self.current_line >= 0:
-        #        self.lines[self.current_line].highlight(False)
-        #    self.current_line += 1
-        #    self.lines[self.current_line].highlight(True)
-
     def quit(self, window):
         for slu in self.slu.values():
             slu.shutdown()
diff --git a/slu.py b/slu.py
index d6fd9644efe6d29c3b73c01b0c73cdba11106431..0d276aa025323e2ead6b6be5af8e7ef74c8c8d0a 100644
--- a/slu.py
+++ b/slu.py
@@ -17,7 +17,7 @@ class SLU:
             _backend.init_slu.restype = c_void_p
 
             # int run_slu(slu_t* slu, char** words, int num_words, int prevn);
-            _backend.run_slu.argtypes = [c_void_p, POINTER(c_char_p), c_int, c_int]
+            _backend.run_slu.argtypes = [c_void_p, POINTER(c_char_p), c_int, c_int, c_char_p]
             _backend.run_slu.restype = c_int
 
             # int num_actions(slu_t* slu)
@@ -52,7 +52,7 @@ class SLU:
         global _backend, _semaphore
         c_words = (c_char_p * len(words))(*words)
         _semaphore.acquire()
-        output = _backend.run_slu(self.slu, c_words, len(words), self.num_actions())
+        output = _backend.run_slu(self.slu, c_words, len(words), self.num_actions(), None)
         _semaphore.release()
         GLib.idle_add(callback, self, output)
 
@@ -73,7 +73,7 @@ class SLU:
 if __name__ == '__main__':
     prefix = 'slu/automate/homeostasis_25nov_%s'
     slu = SLU(prefix % 'dico_word.txt', prefix % 'dico_action.txt', prefix % 'section6.fst', prefix % 'clean_tail.fst')
-    print 'before'
+    #print 'before'
     slu.process(open('slu/homeostasis_25nov.asr/sect6.ref').read().strip().split(), lambda x: sys.stdout.write('%s\n' % x))
-    print 'after'
+    #print 'after'
     slu.shutdown()
diff --git a/slu/automate/homeostasis_25nov.fst b/slu/automate/homeostasis_25nov.fst
new file mode 100644
index 0000000000000000000000000000000000000000..df590feef0cf226e1fa14080ebff998bd27ee9be
Binary files /dev/null and b/slu/automate/homeostasis_25nov.fst differ
diff --git a/slu/automate/homeostasis_25nov_clean_tail.fst b/slu/automate/homeostasis_25nov_clean_tail.fst
new file mode 100644
index 0000000000000000000000000000000000000000..fa0657a6795f1b795cf0e056b66842d49d6f615e
Binary files /dev/null and b/slu/automate/homeostasis_25nov_clean_tail.fst differ
diff --git a/slu/automate/homeostasis_25nov_clean_tail.txt b/slu/automate/homeostasis_25nov_clean_tail.txt
new file mode 100644
index 0000000000000000000000000000000000000000..62c214108e8997f3df0845061527f06b89c18c96
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_clean_tail.txt
@@ -0,0 +1,534 @@
+0	1	uno	uno
+0	1	dos	dos
+0	1	open	open
+0	1	system	system
+0	1	tell	tell
+0	1	me	me
+0	1	technical	technical
+0	1	characteristics	characteristics
+0	1	read	read
+0	1	next	next
+0	1	yes	yes
+0	1	download	download
+0	1	the	the
+0	1	terms	terms
+0	1	and	and
+0	1	conditions	conditions
+0	1	of	of
+0	1	use	use
+0	1	body	body
+0	1	x	x
+0	1	epsilon	epsilon
+0	1	three	three
+0	1	point	point
+0	1	zero	zero
+0	1	accept	accept
+0	1	install	install
+0	1	new	new
+0	1	version	version
+0	1	give	give
+0	1	my	my
+0	1	data	data
+0	1	tres	tres
+0	1	access	access
+0	1	to	to
+0	1	import	import
+0	1	organic	organic
+0	1	matter	matter
+0	1	temperature	temperature
+0	1	time	time
+0	1	space	space
+0	1	position	position
+0	1	subsystems	subsystems
+0	1	estate	estate
+0	1	quatro	quatro
+0	1	functions	functions
+0	1	localization	localization
+0	1	sensations	sensations
+0	1	passion	passion
+0	1	concentration	concentration
+0	1	perception	perception
+0	1	formal	formal
+0	1	force	force
+0	1	logics	logics
+0	1	imagination	imagination
+0	1	effort	effort
+0	1	nervous	nervous
+0	1	internal	internal
+0	1	network	network
+0	1	upload	upload
+0	1	cinco	cinco
+0	1	memory	memory
+0	1	silence	silence
+0	1	clouds	clouds
+0	1	beautiful	beautiful
+0	1	rain	rain
+0	1	identities	identities
+0	1	storm	storm
+0	1	possibilities	possibilities
+0	1	wifi	wifi
+0	1	search	search
+0	1	redo	redo
+0	1	connection	connection
+0	1	connect	connect
+0	1	this	this
+0	1	function	function
+0	1	looking	looking
+0	1	for	for
+0	1	something	something
+0	1	must	must
+0	1	stay	stay
+0	1	exist	exist
+0	1	a	a
+0	1	thread	thread
+0	1	exists	exists
+0	1	between	between
+0	1	recollection	recollection
+0	1	oblivion	oblivion
+0	1	tension	tension
+0	1	an	an
+0	1	echo	echo
+0	1	emptiness	emptiness
+0	1	rare	rare
+0	1	stays	stays
+0	1	through	through
+0	1	nothing	nothing
+0	1	more	more
+0	1	strange	strange
+0	1	than	than
+0	1	exile	exile
+0	1	absolute	absolute
+0	1	abyss	abyss
+0	1	creaking	creaking
+0	1	bones	bones
+0	1	barbarian	barbarian
+0	1	invasion	invasion
+0	1	carelessness	carelessness
+0	1	destinies	destinies
+0	1	wringing	wringing
+0	1	out	out
+0	1	blood	blood
+0	1	should	should
+0	1	or	or
+0	1	not	not
+0	1	try	try
+0	1	with	with
+0	1	functional	functional
+0	1	regulation	regulation
+0	1	possible	possible
+0	1	erase	erase
+0	1	important	important
+0	1	state	state
+0	1	geolocalization	geolocalization
+0	1	encode	encode
+0	1	located	located
+0	1	constellation	constellation
+0	1	center	center
+0	1	six	six
+0	1	nine	nine
+0	1	some	some
+0	1	threads	threads
+0	1	are	are
+0	1	broken	broken
+0	1	remove	remove
+0	1	love	love
+0	1	errors	errors
+0	1	identical	identical
+0	1	identity	identity
+0	1	identifier	identifier
+0	1	where	where
+0	1	answer	answer
+0	1	being	being
+0	1	boing	boing
+0	1	boot	boot
+0	1	reboot	reboot
+0	1	border	border
+0	1	begun	begun
+0	1	begin	begin
+0	1	win	win
+0	1	actions	actions
+0	1	there	there
+0	1	is	is
+0	1	password	password
+0	1	pancreas	pancreas
+0	1	you	you
+0	1	your	your
+0	1	wins	wins
+0	1	everywhere	everywhere
+0	1	golden	golden
+0	1	protein	protein
+0	1	protection	protection
+0	1	amino-acid	amino-acid
+0	1	bilar	bilar
+0	1	violence	violence
+0	1	segment	segment
+0	1	segregation	segregation
+0	1	want	want
+0	1	y	y
+0	1	querer	querer
+0	1	sequences	sequences
+0	1	producing	producing
+0	1	significant	significant
+0	1	alignments	alignments
+0	1	in	in
+0	1	genetic	genetic
+0	1	database	database
+0	1	record	record
+1	1	<joker>	<joker>
+1	1	uno	uno
+1	1	dos	dos
+1	1	open	open
+1	1	system	system
+1	1	tell	tell
+1	1	me	me
+1	1	technical	technical
+1	1	characteristics	characteristics
+1	1	read	read
+1	1	next	next
+1	1	yes	yes
+1	1	download	download
+1	1	the	the
+1	1	terms	terms
+1	1	and	and
+1	1	conditions	conditions
+1	1	of	of
+1	1	use	use
+1	1	body	body
+1	1	x	x
+1	1	epsilon	epsilon
+1	1	three	three
+1	1	point	point
+1	1	zero	zero
+1	1	accept	accept
+1	1	install	install
+1	1	new	new
+1	1	version	version
+1	1	give	give
+1	1	my	my
+1	1	data	data
+1	1	tres	tres
+1	1	access	access
+1	1	to	to
+1	1	import	import
+1	1	organic	organic
+1	1	matter	matter
+1	1	temperature	temperature
+1	1	time	time
+1	1	space	space
+1	1	position	position
+1	1	subsystems	subsystems
+1	1	estate	estate
+1	1	quatro	quatro
+1	1	functions	functions
+1	1	localization	localization
+1	1	sensations	sensations
+1	1	passion	passion
+1	1	concentration	concentration
+1	1	perception	perception
+1	1	formal	formal
+1	1	force	force
+1	1	logics	logics
+1	1	imagination	imagination
+1	1	effort	effort
+1	1	nervous	nervous
+1	1	internal	internal
+1	1	network	network
+1	1	upload	upload
+1	1	cinco	cinco
+1	1	memory	memory
+1	1	silence	silence
+1	1	clouds	clouds
+1	1	beautiful	beautiful
+1	1	rain	rain
+1	1	identities	identities
+1	1	storm	storm
+1	1	possibilities	possibilities
+1	1	wifi	wifi
+1	1	search	search
+1	1	redo	redo
+1	1	connection	connection
+1	1	connect	connect
+1	1	this	this
+1	1	function	function
+1	1	looking	looking
+1	1	for	for
+1	1	something	something
+1	1	must	must
+1	1	stay	stay
+1	1	exist	exist
+1	1	a	a
+1	1	thread	thread
+1	1	exists	exists
+1	1	between	between
+1	1	recollection	recollection
+1	1	oblivion	oblivion
+1	1	tension	tension
+1	1	an	an
+1	1	echo	echo
+1	1	emptiness	emptiness
+1	1	rare	rare
+1	1	stays	stays
+1	1	through	through
+1	1	nothing	nothing
+1	1	more	more
+1	1	strange	strange
+1	1	than	than
+1	1	exile	exile
+1	1	absolute	absolute
+1	1	abyss	abyss
+1	1	creaking	creaking
+1	1	bones	bones
+1	1	barbarian	barbarian
+1	1	invasion	invasion
+1	1	carelessness	carelessness
+1	1	destinies	destinies
+1	1	wringing	wringing
+1	1	out	out
+1	1	blood	blood
+1	1	should	should
+1	1	or	or
+1	1	not	not
+1	1	try	try
+1	1	with	with
+1	1	functional	functional
+1	1	regulation	regulation
+1	1	possible	possible
+1	1	erase	erase
+1	1	important	important
+1	1	state	state
+1	1	geolocalization	geolocalization
+1	1	encode	encode
+1	1	located	located
+1	1	constellation	constellation
+1	1	center	center
+1	1	six	six
+1	1	nine	nine
+1	1	some	some
+1	1	threads	threads
+1	1	are	are
+1	1	broken	broken
+1	1	remove	remove
+1	1	love	love
+1	1	errors	errors
+1	1	identical	identical
+1	1	identity	identity
+1	1	identifier	identifier
+1	1	where	where
+1	1	answer	answer
+1	1	being	being
+1	1	boing	boing
+1	1	boot	boot
+1	1	reboot	reboot
+1	1	border	border
+1	1	begun	begun
+1	1	begin	begin
+1	1	win	win
+1	1	actions	actions
+1	1	there	there
+1	1	is	is
+1	1	password	password
+1	1	pancreas	pancreas
+1	1	you	you
+1	1	your	your
+1	1	wins	wins
+1	1	everywhere	everywhere
+1	1	golden	golden
+1	1	protein	protein
+1	1	protection	protection
+1	1	amino-acid	amino-acid
+1	1	bilar	bilar
+1	1	violence	violence
+1	1	segment	segment
+1	1	segregation	segregation
+1	1	want	want
+1	1	y	y
+1	1	querer	querer
+1	1	sequences	sequences
+1	1	producing	producing
+1	1	significant	significant
+1	1	alignments	alignments
+1	1	in	in
+1	1	genetic	genetic
+1	1	database	database
+1	1	record	record
+1	2	<joker>	<epsilon>
+1
+2	3	<epsilon>	<joker>
+2	2	<joker>	<epsilon>
+2
+3	1	uno	uno
+3	1	dos	dos
+3	1	open	open
+3	1	system	system
+3	1	tell	tell
+3	1	me	me
+3	1	technical	technical
+3	1	characteristics	characteristics
+3	1	read	read
+3	1	next	next
+3	1	yes	yes
+3	1	download	download
+3	1	the	the
+3	1	terms	terms
+3	1	and	and
+3	1	conditions	conditions
+3	1	of	of
+3	1	use	use
+3	1	body	body
+3	1	x	x
+3	1	epsilon	epsilon
+3	1	three	three
+3	1	point	point
+3	1	zero	zero
+3	1	accept	accept
+3	1	install	install
+3	1	new	new
+3	1	version	version
+3	1	give	give
+3	1	my	my
+3	1	data	data
+3	1	tres	tres
+3	1	access	access
+3	1	to	to
+3	1	import	import
+3	1	organic	organic
+3	1	matter	matter
+3	1	temperature	temperature
+3	1	time	time
+3	1	space	space
+3	1	position	position
+3	1	subsystems	subsystems
+3	1	estate	estate
+3	1	quatro	quatro
+3	1	functions	functions
+3	1	localization	localization
+3	1	sensations	sensations
+3	1	passion	passion
+3	1	concentration	concentration
+3	1	perception	perception
+3	1	formal	formal
+3	1	force	force
+3	1	logics	logics
+3	1	imagination	imagination
+3	1	effort	effort
+3	1	nervous	nervous
+3	1	internal	internal
+3	1	network	network
+3	1	upload	upload
+3	1	cinco	cinco
+3	1	memory	memory
+3	1	silence	silence
+3	1	clouds	clouds
+3	1	beautiful	beautiful
+3	1	rain	rain
+3	1	identities	identities
+3	1	storm	storm
+3	1	possibilities	possibilities
+3	1	wifi	wifi
+3	1	search	search
+3	1	redo	redo
+3	1	connection	connection
+3	1	connect	connect
+3	1	this	this
+3	1	function	function
+3	1	looking	looking
+3	1	for	for
+3	1	something	something
+3	1	must	must
+3	1	stay	stay
+3	1	exist	exist
+3	1	a	a
+3	1	thread	thread
+3	1	exists	exists
+3	1	between	between
+3	1	recollection	recollection
+3	1	oblivion	oblivion
+3	1	tension	tension
+3	1	an	an
+3	1	echo	echo
+3	1	emptiness	emptiness
+3	1	rare	rare
+3	1	stays	stays
+3	1	through	through
+3	1	nothing	nothing
+3	1	more	more
+3	1	strange	strange
+3	1	than	than
+3	1	exile	exile
+3	1	absolute	absolute
+3	1	abyss	abyss
+3	1	creaking	creaking
+3	1	bones	bones
+3	1	barbarian	barbarian
+3	1	invasion	invasion
+3	1	carelessness	carelessness
+3	1	destinies	destinies
+3	1	wringing	wringing
+3	1	out	out
+3	1	blood	blood
+3	1	should	should
+3	1	or	or
+3	1	not	not
+3	1	try	try
+3	1	with	with
+3	1	functional	functional
+3	1	regulation	regulation
+3	1	possible	possible
+3	1	erase	erase
+3	1	important	important
+3	1	state	state
+3	1	geolocalization	geolocalization
+3	1	encode	encode
+3	1	located	located
+3	1	constellation	constellation
+3	1	center	center
+3	1	six	six
+3	1	nine	nine
+3	1	some	some
+3	1	threads	threads
+3	1	are	are
+3	1	broken	broken
+3	1	remove	remove
+3	1	love	love
+3	1	errors	errors
+3	1	identical	identical
+3	1	identity	identity
+3	1	identifier	identifier
+3	1	where	where
+3	1	answer	answer
+3	1	being	being
+3	1	boing	boing
+3	1	boot	boot
+3	1	reboot	reboot
+3	1	border	border
+3	1	begun	begun
+3	1	begin	begin
+3	1	win	win
+3	1	actions	actions
+3	1	there	there
+3	1	is	is
+3	1	password	password
+3	1	pancreas	pancreas
+3	1	you	you
+3	1	your	your
+3	1	wins	wins
+3	1	everywhere	everywhere
+3	1	golden	golden
+3	1	protein	protein
+3	1	protection	protection
+3	1	amino-acid	amino-acid
+3	1	bilar	bilar
+3	1	violence	violence
+3	1	segment	segment
+3	1	segregation	segregation
+3	1	want	want
+3	1	y	y
+3	1	querer	querer
+3	1	sequences	sequences
+3	1	producing	producing
+3	1	significant	significant
+3	1	alignments	alignments
+3	1	in	in
+3	1	genetic	genetic
+3	1	database	database
+3	1	record	record
diff --git a/slu/automate/homeostasis_25nov_dico_action.txt b/slu/automate/homeostasis_25nov_dico_action.txt
new file mode 100644
index 0000000000000000000000000000000000000000..f5b38dbadabfbf0d3e99c585973afb2600023544
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_dico_action.txt
@@ -0,0 +1,236 @@
+<epsilon> 0
+<joker> 1
+action(1,1,"start_scene1","uno") 2
+action(1,1,"#ENDSEQUENCE(1)","") 3
+action(1,1,"#ENDSECTION(1)","") 4
+action(2,1,"open_scene2","dos") 5
+action(2,1,"open_2A","open_system") 6
+action(2,1,"#ENDSEQUENCE(1)","") 7
+action(2,2,"start_system_voice","tell_me") 8
+action(2,2,"open_2B","open_technical_characteristics") 9
+action(2,2,"open_2B1","read") 10
+action(2,2,"open_2B2","next") 11
+action(2,2,"open_2B3","yes") 12
+action(2,2,"open_2B4","read") 13
+action(2,2,"open_2B5","download") 14
+action(2,2,"open_2C","open_the_terms_and_conditions_of_use_of_body_x_epsilon_system_three_point_zero") 15
+action(2,2,"open_2C1","accept_terms_and_conditions_of_use") 16
+action(2,2,"open_2C2","next") 17
+action(2,2,"open_2D","install_the_new_version_of_me") 18
+action(2,2,"#end","give_me_my_data") 19
+action(2,2,"#ENDSEQUENCE(2)","") 20
+action(2,2,"#ENDSECTION(2)","") 21
+action(3,1,"open_scene3","tres") 22
+action(3,1,"#end","open_access_to_body_data") 23
+action(3,1,"#ENDSEQUENCE(1)","") 24
+action(3,2,"open3_A","import_body_data") 25
+action(3,2,"open3_A1","import_organic_matter_data") 26
+action(3,2,"open3_A2","import_temperature") 27
+action(3,2,"open3_A3","import_time") 28
+action(3,2,"open3_A4","import_space_data") 29
+action(3,2,"open3_A5","import_position") 30
+action(3,2,"open3_A6","import_body_subsystems") 31
+action(3,2,"open3_A7","import_estate") 32
+action(3,2,"#end","give_me_my_data") 33
+action(3,2,"#ENDSEQUENCE(2)","") 34
+action(3,2,"#ENDSECTION(3)","") 35
+action(4,1,"open_scene4","quatro") 36
+action(4,1,"#end","open_access_to_body_functions") 37
+action(4,1,"#ENDSEQUENCE(1)","") 38
+action(4,2,"open_4A","import_body_functions_space_localization") 39
+action(4,2,"open_4A1","import_body_functions_sensations") 40
+action(4,2,"open_4A2","import_body_functions_passion") 41
+action(4,2,"open_4A3","import_body_functions_concentration") 42
+action(4,2,"open_4A4","import_body_functions_perception") 43
+action(4,2,"open_4A5","import_body_functions_formal_force") 44
+action(4,2,"open_4A6","import_body_functions_logics") 45
+action(4,2,"open_4A7","import_body_functions_imagination") 46
+action(4,2,"open_4A8","import_body_functions_effort") 47
+action(4,2,"open_4A9","import_body_functions_nervous_system") 48
+action(4,2,"open_4A10","import_body_functions_internal_network") 49
+action(4,2,"#end","upload") 50
+action(4,2,"#end","give_me_my_data") 51
+action(4,2,"#ENDSEQUENCE(2)","") 52
+action(4,2,"#ENDSECTION(4)","") 53
+action(5,1,"open_scene5","cinco") 54
+action(5,1,"#ENDSEQUENCE(1)","") 55
+action(5,2,"#end","import_memory") 56
+action(5,2,"#end","give_me_my_data") 57
+action(5,2,"stop_system_voice","silence") 58
+action(5,2,"#ENDSEQUENCE(2)","") 59
+action(5,2,"#ENDSECTION(5)","") 60
+action(6,1,"#end","open_network") 61
+action(6,1,"#ENDSEQUENCE(1)","") 62
+action(6,2,"constellation","clouds") 63
+action(6,2,"constellation","beautiful") 64
+action(6,2,"constellation","data") 65
+action(6,2,"amplification1","clouds") 66
+action(6,2,"constellation","rain_of_identities") 67
+action(6,2,"constellation","storm_of_possibilities") 68
+action(6,2,"constellation","body") 69
+action(6,2,"constellation","wifi") 70
+action(6,2,"constellation","search") 71
+action(6,2,"constellation","and_redo") 72
+action(6,2,"constellation","connection") 73
+action(6,2,"constellation","connect_me_to_this_network") 74
+action(6,2,"constellation","function") 75
+action(6,2,"constellation","looking_for") 76
+action(6,2,"amplification1","something") 77
+action(6,2,"constellation","must") 78
+action(6,2,"constellation","stay") 79
+action(6,2,"amplification1","must") 80
+action(6,2,"constellation","exist") 81
+action(6,2,"constellation","a") 82
+action(6,2,"constellation","thread") 83
+action(6,2,"amplification2","something") 84
+action(6,2,"amplification1","exists") 85
+action(6,2,"constellation","between_recollection_and_oblivion") 86
+action(6,2,"constellation","a_tension_an_echo_an_emptiness") 87
+action(6,2,"amplification3","something") 88
+action(6,2,"constellation","rare") 89
+action(6,2,"amplification1","stays") 90
+action(6,2,"constellation","through") 91
+action(6,2,"constellation","nothing_more_strange_than_this_exile") 92
+action(6,2,"constellation","an_absolute_abyss") 93
+action(6,2,"constellation","a_creaking_of_the_bones") 94
+action(6,2,"constellation","a_barbarian_invasion") 95
+action(6,2,"constellation","the_carelessness_of_destinies") 96
+action(6,2,"constellation","wringing_out_the") 97
+action(6,2,"constellation","blood") 98
+action(6,2,"amplification4","something") 99
+action(6,2,"constellation","should") 100
+action(6,2,"amplification2","must") 101
+action(6,2,"constellation","or") 102
+action(6,2,"constellation","not") 103
+action(6,2,"amplification5","something") 104
+action(6,2,"amplification2","stays") 105
+action(6,2,"#end","try_with_functional_regulation") 106
+action(6,2,"#end","give_me_my_data") 107
+action(6,2,"#ENDSEQUENCE(2)","") 108
+action(6,3,"constellation","possible") 109
+action(6,3,"amplification1","function") 110
+action(6,3,"constellation","erase_the") 111
+action(6,3,"constellation","space") 112
+action(6,3,"constellation","important") 113
+action(6,3,"constellation","state") 114
+action(6,3,"constellation","geolocalization") 115
+action(6,3,"amplification1","important") 116
+action(6,3,"constellation","encode") 117
+action(6,3,"constellation","passion") 118
+action(6,3,"constellation","for_located") 119
+action(6,3,"constellation","constellation") 120
+action(6,3,"constellation","center") 121
+action(6,3,"constellation","six_six_nine") 122
+action(6,3,"amplification2","important") 123
+action(6,3,"constellation","some") 124
+action(6,3,"amplification1","threads") 125
+action(6,3,"constellation","are_broken") 126
+action(6,3,"constellation","remove") 127
+action(6,3,"constellation","memory") 128
+action(6,3,"constellation","love") 129
+action(6,3,"amplification1","love") 130
+action(6,3,"amplification1","memory") 131
+action(6,3,"amplification2","love") 132
+action(6,3,"amplification1","errors") 133
+action(6,3,"constellation","a_identical") 134
+action(6,3,"constellation","identity") 135
+action(6,3,"constellation","identifier") 136
+action(6,3,"constellation","where") 137
+action(6,3,"constellation","answer") 138
+action(6,3,"constellation","being") 139
+action(6,3,"constellation","boing") 140
+action(6,3,"constellation","boot") 141
+action(6,3,"constellation","reboot") 142
+action(6,3,"constellation","border_of") 143
+action(6,3,"amplification1","body") 144
+action(6,3,"amplification2","body") 145
+action(6,3,"amplification1","being") 146
+action(6,3,"constellation","begun") 147
+action(6,3,"constellation","begin") 148
+action(6,3,"constellation","win") 149
+action(6,3,"amplification1","border_of") 150
+action(6,3,"amplification2","being") 151
+action(6,3,"constellation","actions") 152
+action(6,3,"amplification1","search") 153
+action(6,3,"constellation","the") 154
+action(6,3,"amplification1","answer") 155
+action(6,3,"constellation","of") 156
+action(6,3,"amplification1","the") 157
+action(6,3,"constellation","there_is") 158
+action(6,3,"amplification1","there_is") 159
+action(6,3,"amplification6","something") 160
+action(6,3,"amplification3","body") 161
+action(6,3,"constellation","password") 162
+action(6,3,"constellation","pancreas") 163
+action(6,3,"constellation","give") 164
+action(6,3,"constellation","me") 165
+action(6,3,"amplification1","data") 166
+action(6,3,"amplification1","give") 167
+action(6,3,"amplification1","me") 168
+action(6,3,"amplification2","give") 169
+action(6,3,"amplification2","me") 170
+action(6,3,"constellation","you") 171
+action(6,3,"constellation","your") 172
+action(6,3,"amplification3","give") 173
+action(6,3,"amplification3","me") 174
+action(6,3,"amplification1","your") 175
+action(6,3,"amplification2","data") 176
+action(6,3,"amplification4","give") 177
+action(6,3,"amplification4","me") 178
+action(6,3,"amplification2","your") 179
+action(6,3,"amplification5","give") 180
+action(6,3,"amplification5","me") 181
+action(6,3,"amplification3","your") 182
+action(6,3,"amplification3","data") 183
+action(6,3,"amplification1","begun") 184
+action(6,3,"amplification1","begin") 185
+action(6,3,"amplification1","wins") 186
+action(6,3,"amplification1","blood") 187
+action(6,3,"constellation","everywhere") 188
+action(6,3,"amplification6","give") 189
+action(6,3,"amplification7","give") 190
+action(6,3,"amplification4","your") 191
+action(6,3,"amplification2","blood") 192
+action(6,3,"constellation","golden") 193
+action(6,3,"amplification1","golden") 194
+action(6,3,"amplification4","data") 195
+action(6,3,"constellation","protein_protection_amino-acid") 196
+action(6,3,"amplification1","where") 197
+action(6,3,"constellation","bilar") 198
+action(6,3,"constellation","violence_segment") 199
+action(6,3,"constellation","segregation") 200
+action(6,3,"amplification2","memory") 201
+action(6,3,"amplification1","encode") 202
+action(6,3,"amplification2","where") 203
+action(6,3,"amplification3","where") 204
+action(6,3,"amplification4","body") 205
+action(6,3,"amplification4","where") 206
+action(6,3,"amplification5","body") 207
+action(6,3,"amplification5","where") 208
+action(6,3,"amplification6","where") 209
+action(6,3,"amplification8","give") 210
+action(6,3,"amplification5","data") 211
+action(6,3,"amplification2","clouds") 212
+action(6,3,"amplification7","something") 213
+action(6,3,"amplification3","being") 214
+action(6,3,"amplification7","where") 215
+action(6,3,"amplification8","where") 216
+action(6,3,"amplification9","give") 217
+action(6,3,"amplification6","data") 218
+action(6,3,"amplification2","functions") 219
+action(6,3,"#end","and_to_want") 220
+action(6,3,"#end","give_me_my_data") 221
+action(6,3,"#ENDSEQUENCE(3)","") 222
+action(6,4,"#end","y_querer") 223
+action(6,4,"#end","give_me_my_data") 224
+action(6,4,"#ENDSEQUENCE(4)","") 225
+action(6,5,"#end","give_me_my_data") 226
+action(6,5,"#ENDSEQUENCE(5)","") 227
+action(6,5,"#ENDSECTION(6)","") 228
+action(8,1,"#end","search_for_sequences_producing_significant_alignments_in_genetic_database") 229
+action(8,1,"#end","give_me_my_data") 230
+action(8,1,"#ENDSEQUENCE(1)","") 231
+action(8,2,"memorise_loop","record") 232
+action(8,2,"#end","give_me_my_data") 233
+action(8,2,"#ENDSEQUENCE(2)","") 234
+action(8,2,"#ENDSECTION(8)","") 235
diff --git a/slu/automate/homeostasis_25nov_dico_word.txt b/slu/automate/homeostasis_25nov_dico_word.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0739463db291072fd3da359892610b2b46e066ec
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_dico_word.txt
@@ -0,0 +1,178 @@
+<epsilon> 0
+<joker> 1
+uno 2
+dos 3
+open 4
+system 5
+tell 6
+me 7
+technical 8
+characteristics 9
+read 10
+next 11
+yes 12
+download 13
+the 14
+terms 15
+and 16
+conditions 17
+of 18
+use 19
+body 20
+x 21
+epsilon 22
+three 23
+point 24
+zero 25
+accept 26
+install 27
+new 28
+version 29
+give 30
+my 31
+data 32
+tres 33
+access 34
+to 35
+import 36
+organic 37
+matter 38
+temperature 39
+time 40
+space 41
+position 42
+subsystems 43
+estate 44
+quatro 45
+functions 46
+localization 47
+sensations 48
+passion 49
+concentration 50
+perception 51
+formal 52
+force 53
+logics 54
+imagination 55
+effort 56
+nervous 57
+internal 58
+network 59
+upload 60
+cinco 61
+memory 62
+silence 63
+clouds 64
+beautiful 65
+rain 66
+identities 67
+storm 68
+possibilities 69
+wifi 70
+search 71
+redo 72
+connection 73
+connect 74
+this 75
+function 76
+looking 77
+for 78
+something 79
+must 80
+stay 81
+exist 82
+a 83
+thread 84
+exists 85
+between 86
+recollection 87
+oblivion 88
+tension 89
+an 90
+echo 91
+emptiness 92
+rare 93
+stays 94
+through 95
+nothing 96
+more 97
+strange 98
+than 99
+exile 100
+absolute 101
+abyss 102
+creaking 103
+bones 104
+barbarian 105
+invasion 106
+carelessness 107
+destinies 108
+wringing 109
+out 110
+blood 111
+should 112
+or 113
+not 114
+try 115
+with 116
+functional 117
+regulation 118
+possible 119
+erase 120
+important 121
+state 122
+geolocalization 123
+encode 124
+located 125
+constellation 126
+center 127
+six 128
+nine 129
+some 130
+threads 131
+are 132
+broken 133
+remove 134
+love 135
+errors 136
+identical 137
+identity 138
+identifier 139
+where 140
+answer 141
+being 142
+boing 143
+boot 144
+reboot 145
+border 146
+begun 147
+begin 148
+win 149
+actions 150
+there 151
+is 152
+password 153
+pancreas 154
+you 155
+your 156
+wins 157
+everywhere 158
+golden 159
+protein 160
+protection 161
+amino-acid 162
+bilar 163
+violence 164
+segment 165
+segregation 166
+want 167
+y 168
+querer 169
+sequences 170
+producing 171
+significant 172
+alignments 173
+in 174
+genetic 175
+database 176
+record 177
diff --git a/slu/automate/homeostasis_25nov_section1.fst b/slu/automate/homeostasis_25nov_section1.fst
new file mode 100644
index 0000000000000000000000000000000000000000..b8ceba879c1abf140ac66f07ecd773f6954f07f4
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section1.fst differ
diff --git a/slu/automate/homeostasis_25nov_section1.txt b/slu/automate/homeostasis_25nov_section1.txt
new file mode 100644
index 0000000000000000000000000000000000000000..b5cd31b32ca2ba0478c0480caeca13b919ce3320
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section1.txt
@@ -0,0 +1,6 @@
+0	1	uno	2	0
+1	100
+0	1	<joker>	2	100
+1	2	<epsilon>	3	0
+2	3	<epsilon>	4	0
+3
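The section*.txt files in this patch look like OpenFst-style text transducers with an extra column: each arc line reads "source  destination  input-word  action-id  weight", a line holding just a state (optionally followed by a weight) marks a final state, and the action ids index the entries of homeostasis_25nov_dico_action.txt (id 2, for example, is action(1,1,"start_scene1","uno")). The sketch below only illustrates that assumed layout by parsing the section 1 transducer inline and greedily walking a word sequence; the actual decoding is done by librocio_slu on the compiled .fst files, not by this code.

# Minimal reader for the assumed text layout of the section transducers.
SECTION1 = """\
0\t1\tuno\t2\t0
1\t100
0\t1\t<joker>\t2\t100
1\t2\t<epsilon>\t3\t0
2\t3\t<epsilon>\t4\t0
3
"""

def read_fst(text):
    arcs, finals = {}, {}
    for line in text.splitlines():
        parts = line.split('\t')
        if len(parts) >= 4:  # arc: src dst word action weight
            src, dst, word, action = parts[0], parts[1], parts[2], int(parts[3])
            arcs.setdefault(src, []).append((dst, word, action))
        else:                # final state, optional weight
            finals[parts[0]] = float(parts[1]) if len(parts) > 1 else 0.0
    return arcs, finals

def greedy_walk(arcs, words):
    """Follow matching word arcs, then free <epsilon> arcs, collecting action ids."""
    state, actions = '0', []
    for word in words:
        for dst, arc_word, action in arcs.get(state, []):
            if arc_word == word:
                state = dst
                if action:
                    actions.append(action)
                break
    while True:  # flush trailing <epsilon> arcs (#ENDSEQUENCE/#ENDSECTION markers)
        eps = [a for a in arcs.get(state, []) if a[1] == '<epsilon>']
        if not eps:
            break
        state = eps[0][0]
        if eps[0][2]:
            actions.append(eps[0][2])
    return actions

arcs, _finals = read_fst(SECTION1)
print(greedy_walk(arcs, ['uno']))  # prints [2, 3, 4]

Under this reading, recognizing "uno" yields action 2 (start_scene1) followed by ids 3 and 4, the #ENDSEQUENCE(1) and #ENDSECTION(1) markers that slu_finished in main.py branches on.
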
diff --git a/slu/automate/homeostasis_25nov_section1_text.fst b/slu/automate/homeostasis_25nov_section1_text.fst
new file mode 100644
index 0000000000000000000000000000000000000000..72cd09a0af1e61097416968781cc709cc534d12c
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section1_text.fst differ
diff --git a/slu/automate/homeostasis_25nov_section1_text.txt b/slu/automate/homeostasis_25nov_section1_text.txt
new file mode 100644
index 0000000000000000000000000000000000000000..44373e1421aebde5f9039ba7d7be154cf4cc007c
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section1_text.txt
@@ -0,0 +1,2 @@
+0	1	uno
+1
diff --git a/slu/automate/homeostasis_25nov_section2.fst b/slu/automate/homeostasis_25nov_section2.fst
new file mode 100644
index 0000000000000000000000000000000000000000..cd2b1f7c324bf6c567ba298214982a40d828ba07
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section2.fst differ
diff --git a/slu/automate/homeostasis_25nov_section2.txt b/slu/automate/homeostasis_25nov_section2.txt
new file mode 100644
index 0000000000000000000000000000000000000000..87b2764ba20abb5d23536ff25e9f2f3f39c1c434
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section2.txt
@@ -0,0 +1,89 @@
+0	1	dos	5	0
+1	100
+0	1	<joker>	5	100
+1	2	open	6	0
+2	3	system	0	0
+3	100
+1	3	<joker>	6	100
+3	4	<epsilon>	7	0
+4
+4	5	tell	8	0
+5	6	me	0	0
+6	4	<epsilon>	0	0
+6	100
+4	6	<joker>	8	100
+6	7	open	9	0
+7	8	technical	0	0
+8	9	characteristics	0	0
+9	6	<epsilon>	0	0
+9	100
+6	9	<joker>	9	100
+9	10	read	10	0
+10	9	<epsilon>	0	0
+10	100
+9	10	<joker>	10	100
+10	11	next	11	0
+11	10	<epsilon>	0	0
+11	100
+10	11	<joker>	11	100
+11	12	yes	12	0
+12	11	<epsilon>	0	0
+12	100
+11	12	<joker>	12	100
+12	13	read	13	0
+13	12	<epsilon>	0	0
+13	100
+12	13	<joker>	13	100
+13	14	download	14	0
+14	13	<epsilon>	0	0
+14	100
+13	14	<joker>	14	100
+14	15	open	15	0
+15	16	the	0	0
+16	17	terms	0	0
+17	18	and	0	0
+18	19	conditions	0	0
+19	20	of	0	0
+20	21	use	0	0
+21	22	of	0	0
+22	23	body	0	0
+23	24	x	0	0
+24	25	epsilon	0	0
+25	26	system	0	0
+26	27	three	0	0
+27	28	point	0	0
+28	29	zero	0	0
+29	14	<epsilon>	0	0
+29	100
+14	29	<joker>	15	100
+29	30	accept	16	0
+30	31	terms	0	0
+31	32	and	0	0
+32	33	conditions	0	0
+33	34	of	0	0
+34	35	use	0	0
+35	29	<epsilon>	0	0
+35	100
+29	35	<joker>	16	100
+35	36	next	17	0
+36	35	<epsilon>	0	0
+36	100
+35	36	<joker>	17	100
+36	37	install	18	0
+37	38	the	0	0
+38	39	new	0	0
+39	40	version	0	0
+40	41	of	0	0
+41	42	me	0	0
+42	36	<epsilon>	0	0
+42	100
+36	42	<joker>	18	100
+42	43	give	19	0
+43	44	me	0	0
+44	45	my	0	0
+45	46	data	0	0
+46	42	<epsilon>	0	0
+42	46	<joker>	19	100
+46	47	<epsilon>	20	0
+47	48	<epsilon>	21	0
+48
diff --git a/slu/automate/homeostasis_25nov_section2_text.fst b/slu/automate/homeostasis_25nov_section2_text.fst
new file mode 100644
index 0000000000000000000000000000000000000000..a937e7c322daeafbe81c03d2c748b640d4b75f9b
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section2_text.fst differ
diff --git a/slu/automate/homeostasis_25nov_section2_text.txt b/slu/automate/homeostasis_25nov_section2_text.txt
new file mode 100644
index 0000000000000000000000000000000000000000..0caf933a03e21f069548bf81017a052769ed1fd9
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section2_text.txt
@@ -0,0 +1,46 @@
+0	1	dos
+1	2	open
+2	3	system
+3	4	tell
+4	5	me
+5	6	open
+6	7	technical
+7	8	characteristics
+8	9	read
+9	10	next
+10	11	yes
+11	12	read
+12	13	download
+13	14	open
+14	15	the
+15	16	terms
+16	17	and
+17	18	conditions
+18	19	of
+19	20	use
+20	21	of
+21	22	body
+22	23	x
+23	24	epsilon
+24	25	system
+25	26	three
+26	27	point
+27	28	zero
+28	29	accept
+29	30	terms
+30	31	and
+31	32	conditions
+32	33	of
+33	34	use
+34	35	next
+35	36	install
+36	37	the
+37	38	new
+38	39	version
+39	40	of
+40	41	me
+41	42	give
+42	43	me
+43	44	my
+44	45	data
+45
diff --git a/slu/automate/homeostasis_25nov_section3.fst b/slu/automate/homeostasis_25nov_section3.fst
new file mode 100644
index 0000000000000000000000000000000000000000..2c9ecb79e9e5b0e34814b9aa2758975c78f9bffb
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section3.fst differ
diff --git a/slu/automate/homeostasis_25nov_section3.txt b/slu/automate/homeostasis_25nov_section3.txt
new file mode 100644
index 0000000000000000000000000000000000000000..9040aeb8d726c3c150cfe16b00326115e029087a
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section3.txt
@@ -0,0 +1,65 @@
+0	1	tres	22	0
+1	100
+0	1	<joker>	22	100
+1	2	open	23	0
+2	3	access	0	0
+3	4	to	0	0
+4	5	body	0	0
+5	6	data	0	0
+1	6	<joker>	23	100
+6	7	<epsilon>	24	0
+7
+7	8	import	25	0
+8	9	body	0	0
+9	10	data	0	0
+10	7	<epsilon>	0	0
+10	100
+7	10	<joker>	25	100
+10	11	import	26	0
+11	12	organic	0	0
+12	13	matter	0	0
+13	14	data	0	0
+14	10	<epsilon>	0	0
+14	100
+10	14	<joker>	26	100
+14	15	import	27	0
+15	16	temperature	0	0
+16	14	<epsilon>	0	0
+16	100
+14	16	<joker>	27	100
+16	17	import	28	0
+17	18	time	0	0
+18	16	<epsilon>	0	0
+18	100
+16	18	<joker>	28	100
+18	19	import	29	0
+19	20	space	0	0
+20	21	data	0	0
+21	18	<epsilon>	0	0
+21	100
+18	21	<joker>	29	100
+21	22	import	30	0
+22	23	position	0	0
+23	21	<epsilon>	0	0
+23	100
+21	23	<joker>	30	100
+23	24	import	31	0
+24	25	body	0	0
+25	26	subsystems	0	0
+26	23	<epsilon>	0	0
+26	100
+23	26	<joker>	31	100
+26	27	import	32	0
+27	28	estate	0	0
+28	26	<epsilon>	0	0
+28	100
+26	28	<joker>	32	100
+28	29	give	33	0
+29	30	me	0	0
+30	31	my	0	0
+31	32	data	0	0
+32	28	<epsilon>	0	0
+28	32	<joker>	33	100
+32	33	<epsilon>	34	0
+33	34	<epsilon>	35	0
+34
diff --git a/slu/automate/homeostasis_25nov_section3_text.fst b/slu/automate/homeostasis_25nov_section3_text.fst
new file mode 100644
index 0000000000000000000000000000000000000000..71df98c8c0f818693617697a7c8675f9d05fba95
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section3_text.fst differ
diff --git a/slu/automate/homeostasis_25nov_section3_text.txt b/slu/automate/homeostasis_25nov_section3_text.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e34f9b7de84d3891e1b48d5053cc27129c59b3a3
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section3_text.txt
@@ -0,0 +1,32 @@
+0	1	tres
+1	2	open
+2	3	access
+3	4	to
+4	5	body
+5	6	data
+6	7	import
+7	8	body
+8	9	data
+9	10	import
+10	11	organic
+11	12	matter
+12	13	data
+13	14	import
+14	15	temperature
+15	16	import
+16	17	time
+17	18	import
+18	19	space
+19	20	data
+20	21	import
+21	22	position
+22	23	import
+23	24	body
+24	25	subsystems
+25	26	import
+26	27	estate
+27	28	give
+28	29	me
+29	30	my
+30	31	data
+31
diff --git a/slu/automate/homeostasis_25nov_section4.fst b/slu/automate/homeostasis_25nov_section4.fst
new file mode 100644
index 0000000000000000000000000000000000000000..daeee6b49dbefb2564843f63eb849c0c79302809
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section4.fst differ
diff --git a/slu/automate/homeostasis_25nov_section4.txt b/slu/automate/homeostasis_25nov_section4.txt
new file mode 100644
index 0000000000000000000000000000000000000000..8fcb30dadd7e99b71ae4f8cc0deeec516c2376a6
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section4.txt
@@ -0,0 +1,105 @@
+0	1	quatro	36	0
+1	100
+0	1	<joker>	36	100
+1	2	open	37	0
+2	3	access	0	0
+3	4	to	0	0
+4	5	body	0	0
+5	6	functions	0	0
+1	6	<joker>	37	100
+6	7	<epsilon>	38	0
+7
+7	8	import	39	0
+8	9	body	0	0
+9	10	functions	0	0
+10	11	space	0	0
+11	12	localization	0	0
+12	7	<epsilon>	0	0
+12	100
+7	12	<joker>	39	100
+12	13	import	40	0
+13	14	body	0	0
+14	15	functions	0	0
+15	16	sensations	0	0
+16	12	<epsilon>	0	0
+16	100
+12	16	<joker>	40	100
+16	17	import	41	0
+17	18	body	0	0
+18	19	functions	0	0
+19	20	passion	0	0
+20	16	<epsilon>	0	0
+20	100
+16	20	<joker>	41	100
+20	21	import	42	0
+21	22	body	0	0
+22	23	functions	0	0
+23	24	concentration	0	0
+24	20	<epsilon>	0	0
+24	100
+20	24	<joker>	42	100
+24	25	import	43	0
+25	26	body	0	0
+26	27	functions	0	0
+27	28	perception	0	0
+28	24	<epsilon>	0	0
+28	100
+24	28	<joker>	43	100
+28	29	import	44	0
+29	30	body	0	0
+30	31	functions	0	0
+31	32	formal	0	0
+32	33	force	0	0
+33	28	<epsilon>	0	0
+33	100
+28	33	<joker>	44	100
+33	34	import	45	0
+34	35	body	0	0
+35	36	functions	0	0
+36	37	logics	0	0
+37	33	<epsilon>	0	0
+37	100
+33	37	<joker>	45	100
+37	38	import	46	0
+38	39	body	0	0
+39	40	functions	0	0
+40	41	imagination	0	0
+41	37	<epsilon>	0	0
+41	100
+37	41	<joker>	46	100
+41	42	import	47	0
+42	43	body	0	0
+43	44	functions	0	0
+44	45	effort	0	0
+45	41	<epsilon>	0	0
+45	100
+41	45	<joker>	47	100
+45	46	import	48	0
+46	47	body	0	0
+47	48	functions	0	0
+48	49	nervous	0	0
+49	50	system	0	0
+50	45	<epsilon>	0	0
+50	100
+45	50	<joker>	48	100
+50	51	import	49	0
+51	52	body	0	0
+52	53	functions	0	0
+53	54	internal	0	0
+54	55	network	0	0
+55	50	<epsilon>	0	0
+55	100
+50	55	<joker>	49	100
+55	56	upload	50	0
+56	55	<epsilon>	0	0
+55	56	<joker>	50	100
+56	57	give	51	0
+57	58	me	0	0
+58	59	my	0	0
+59	60	data	0	0
+60	56	<epsilon>	0	0
+56	60	<joker>	51	100
+56	61	<epsilon>	52	0
+60	61	<epsilon>	52	0
+61	62	<epsilon>	53	0
+62
diff --git a/slu/automate/homeostasis_25nov_section4_text.fst b/slu/automate/homeostasis_25nov_section4_text.fst
new file mode 100644
index 0000000000000000000000000000000000000000..7e5e97abcce60a999324f62fcdeca4532f4aa89f
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section4_text.fst differ
diff --git a/slu/automate/homeostasis_25nov_section4_text.txt b/slu/automate/homeostasis_25nov_section4_text.txt
new file mode 100644
index 0000000000000000000000000000000000000000..aa50277a5da6fcd2cf8ecb9220d512e6201833a8
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section4_text.txt
@@ -0,0 +1,60 @@
+0	1	quatro
+1	2	open
+2	3	access
+3	4	to
+4	5	body
+5	6	functions
+6	7	import
+7	8	body
+8	9	functions
+9	10	space
+10	11	localization
+11	12	import
+12	13	body
+13	14	functions
+14	15	sensations
+15	16	import
+16	17	body
+17	18	functions
+18	19	passion
+19	20	import
+20	21	body
+21	22	functions
+22	23	concentration
+23	24	import
+24	25	body
+25	26	functions
+26	27	perception
+27	28	import
+28	29	body
+29	30	functions
+30	31	formal
+31	32	force
+32	33	import
+33	34	body
+34	35	functions
+35	36	logics
+36	37	import
+37	38	body
+38	39	functions
+39	40	imagination
+40	41	import
+41	42	body
+42	43	functions
+43	44	effort
+44	45	import
+45	46	body
+46	47	functions
+47	48	nervous
+48	49	system
+49	50	import
+50	51	body
+51	52	functions
+52	53	internal
+53	54	network
+54	55	upload
+55	56	give
+56	57	me
+57	58	my
+58	59	data
+59
diff --git a/slu/automate/homeostasis_25nov_section5.fst b/slu/automate/homeostasis_25nov_section5.fst
new file mode 100644
index 0000000000000000000000000000000000000000..23d9f15cbdc79e0a896cb029ad15d613789cc930
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section5.fst differ
diff --git a/slu/automate/homeostasis_25nov_section5.txt b/slu/automate/homeostasis_25nov_section5.txt
new file mode 100644
index 0000000000000000000000000000000000000000..2320a31c71439a6f8491a6840da2ab59d107dda6
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section5.txt
@@ -0,0 +1,23 @@
+0	1	cinco	54	0
+1	100
+0	1	<joker>	54	100
+1	2	<epsilon>	55	0
+2
+2	3	import	56	0
+3	4	memory	0	0
+4	2	<epsilon>	0	0
+2	4	<joker>	56	100
+4	5	give	57	0
+5	6	me	0	0
+6	7	my	0	0
+7	8	data	0	0
+8	4	<epsilon>	0	0
+4	8	<joker>	57	100
+8	9	silence	58	0
+9	8	<epsilon>	0	0
+9	100
+8	9	<joker>	58	100
+4	10	<epsilon>	59	0
+8	10	<epsilon>	59	0
+10	11	<epsilon>	60	0
+11
diff --git a/slu/automate/homeostasis_25nov_section5_text.fst b/slu/automate/homeostasis_25nov_section5_text.fst
new file mode 100644
index 0000000000000000000000000000000000000000..18000c3fe04a27778d572aa3b0626d1a5accb098
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section5_text.fst differ
diff --git a/slu/automate/homeostasis_25nov_section5_text.txt b/slu/automate/homeostasis_25nov_section5_text.txt
new file mode 100644
index 0000000000000000000000000000000000000000..868939176c40dca0af6ca2f7a60668e8530b9a2c
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section5_text.txt
@@ -0,0 +1,9 @@
+0	1	cinco
+1	2	import
+2	3	memory
+3	4	give
+4	5	me
+5	6	my
+6	7	data
+7	8	silence
+8
diff --git a/slu/automate/homeostasis_25nov_section6.fst b/slu/automate/homeostasis_25nov_section6.fst
new file mode 100644
index 0000000000000000000000000000000000000000..46dfd4e2e5c1515aa93b8f01ba6844d8c7dab303
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section6.fst differ
diff --git a/slu/automate/homeostasis_25nov_section6.txt b/slu/automate/homeostasis_25nov_section6.txt
new file mode 100644
index 0000000000000000000000000000000000000000..246ffb33250baf05f3c9fa791b19b0214f9cc0e2
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section6.txt
@@ -0,0 +1,566 @@
+0	1	open	61	0
+1	2	network	0	0
+0	2	<joker>	61	100
+2	3	<epsilon>	62	0
+3
+3	4	clouds	63	0
+4	3	<epsilon>	0	0
+4	100
+3	5	beautiful	64	0
+5	3	<epsilon>	0	0
+5	100
+3	6	data	65	0
+6	3	<epsilon>	0	0
+6	100
+3	7	clouds	66	0
+7	3	<epsilon>	0	0
+7	100
+3	8	rain	67	0
+8	9	of	0	0
+9	10	identities	0	0
+10	3	<epsilon>	0	0
+10	100
+3	11	storm	68	0
+11	12	of	0	0
+12	13	possibilities	0	0
+13	3	<epsilon>	0	0
+13	100
+3	14	body	69	0
+14	3	<epsilon>	0	0
+14	100
+3	15	wifi	70	0
+15	3	<epsilon>	0	0
+15	100
+3	16	search	71	0
+16	3	<epsilon>	0	0
+16	100
+3	17	and	72	0
+17	18	redo	0	0
+18	3	<epsilon>	0	0
+18	100
+3	19	connection	73	0
+19	3	<epsilon>	0	0
+19	100
+3	20	connect	74	0
+20	21	me	0	0
+21	22	to	0	0
+22	23	this	0	0
+23	24	network	0	0
+24	3	<epsilon>	0	0
+24	100
+3	25	function	75	0
+25	3	<epsilon>	0	0
+25	100
+3	26	looking	76	0
+26	27	for	0	0
+27	3	<epsilon>	0	0
+27	100
+3	28	something	77	0
+28	3	<epsilon>	0	0
+28	100
+3	29	must	78	0
+29	3	<epsilon>	0	0
+29	100
+3	30	stay	79	0
+30	3	<epsilon>	0	0
+30	100
+3	31	something	77	0
+31	3	<epsilon>	0	0
+31	100
+3	32	must	80	0
+32	3	<epsilon>	0	0
+32	100
+3	33	exist	81	0
+33	3	<epsilon>	0	0
+33	100
+3	34	a	82	0
+34	3	<epsilon>	0	0
+34	100
+3	35	thread	83	0
+35	3	<epsilon>	0	0
+35	100
+3	36	something	84	0
+36	3	<epsilon>	0	0
+36	100
+3	37	exists	85	0
+37	3	<epsilon>	0	0
+37	100
+3	38	between	86	0
+38	39	recollection	0	0
+39	40	and	0	0
+40	41	oblivion	0	0
+41	3	<epsilon>	0	0
+41	100
+3	42	a	87	0
+42	43	tension	0	0
+43	44	an	0	0
+44	45	echo	0	0
+45	46	an	0	0
+46	47	emptiness	0	0
+47	3	<epsilon>	0	0
+47	100
+3	48	something	88	0
+48	3	<epsilon>	0	0
+48	100
+3	49	rare	89	0
+49	3	<epsilon>	0	0
+49	100
+3	50	stays	90	0
+50	3	<epsilon>	0	0
+50	100
+3	51	through	91	0
+51	3	<epsilon>	0	0
+51	100
+3	52	nothing	92	0
+52	53	more	0	0
+53	54	strange	0	0
+54	55	than	0	0
+55	56	this	0	0
+56	57	exile	0	0
+57	3	<epsilon>	0	0
+57	100
+3	58	an	93	0
+58	59	absolute	0	0
+59	60	abyss	0	0
+60	3	<epsilon>	0	0
+60	100
+3	61	a	94	0
+61	62	creaking	0	0
+62	63	of	0	0
+63	64	the	0	0
+64	65	bones	0	0
+65	3	<epsilon>	0	0
+65	100
+3	66	a	95	0
+66	67	barbarian	0	0
+67	68	invasion	0	0
+68	3	<epsilon>	0	0
+68	100
+3	69	the	96	0
+69	70	carelessness	0	0
+70	71	of	0	0
+71	72	destinies	0	0
+72	3	<epsilon>	0	0
+72	100
+3	73	wringing	97	0
+73	74	out	0	0
+74	75	the	0	0
+75	3	<epsilon>	0	0
+75	100
+3	76	blood	98	0
+76	3	<epsilon>	0	0
+76	100
+3	77	something	99	0
+77	3	<epsilon>	0	0
+77	100
+3	78	should	100	0
+78	3	<epsilon>	0	0
+78	100
+3	79	must	101	0
+79	3	<epsilon>	0	0
+79	100
+3	80	or	102	0
+80	3	<epsilon>	0	0
+80	100
+3	81	must	101	0
+81	3	<epsilon>	0	0
+81	100
+3	82	not	103	0
+82	3	<epsilon>	0	0
+82	100
+3	83	something	104	0
+83	3	<epsilon>	0	0
+83	100
+3	84	stays	105	0
+84	3	<epsilon>	0	0
+84	100
+3	85	try	106	0
+85	86	with	0	0
+86	87	functional	0	0
+87	88	regulation	0	0
+88	3	<epsilon>	0	0
+3	89	give	107	0
+89	90	me	0	0
+90	91	my	0	0
+91	92	data	0	0
+92	3	<epsilon>	0	0
+88	93	<epsilon>	108	0
+92	93	<epsilon>	108	0
+93
+93	94	possible	109	0
+94	93	<epsilon>	0	0
+94	100
+93	95	function	110	0
+95	93	<epsilon>	0	0
+95	100
+93	96	erase	111	0
+96	97	the	0	0
+97	93	<epsilon>	0	0
+97	100
+93	98	space	112	0
+98	93	<epsilon>	0	0
+98	100
+93	99	important	113	0
+99	93	<epsilon>	0	0
+99	100
+93	100	state	114	0
+100	93	<epsilon>	0	0
+100	100
+93	101	geolocalization	115	0
+101	93	<epsilon>	0	0
+101	100
+93	102	important	116	0
+102	93	<epsilon>	0	0
+102	100
+93	103	encode	117	0
+103	93	<epsilon>	0	0
+103	100
+93	104	passion	118	0
+104	93	<epsilon>	0	0
+104	100
+93	105	for	119	0
+105	106	located	0	0
+106	93	<epsilon>	0	0
+106	100
+93	107	constellation	120	0
+107	93	<epsilon>	0	0
+107	100
+93	108	center	121	0
+108	93	<epsilon>	0	0
+108	100
+93	109	six	122	0
+109	110	six	0	0
+110	111	nine	0	0
+111	93	<epsilon>	0	0
+111	100
+93	112	important	123	0
+112	93	<epsilon>	0	0
+112	100
+93	113	some	124	0
+113	93	<epsilon>	0	0
+113	100
+93	114	threads	125	0
+114	93	<epsilon>	0	0
+114	100
+93	115	are	126	0
+115	116	broken	0	0
+116	93	<epsilon>	0	0
+116	100
+93	117	remove	127	0
+117	93	<epsilon>	0	0
+117	100
+93	118	memory	128	0
+118	93	<epsilon>	0	0
+118	100
+93	119	love	129	0
+119	93	<epsilon>	0	0
+119	100
+93	120	love	130	0
+120	93	<epsilon>	0	0
+120	100
+93	121	memory	131	0
+121	93	<epsilon>	0	0
+121	100
+93	122	love	132	0
+122	93	<epsilon>	0	0
+122	100
+93	123	errors	133	0
+123	93	<epsilon>	0	0
+123	100
+93	124	a	134	0
+124	125	identical	0	0
+125	93	<epsilon>	0	0
+125	100
+93	126	identity	135	0
+126	93	<epsilon>	0	0
+126	100
+93	127	identifier	136	0
+127	93	<epsilon>	0	0
+127	100
+93	128	where	137	0
+128	93	<epsilon>	0	0
+128	100
+93	129	answer	138	0
+129	93	<epsilon>	0	0
+129	100
+93	130	being	139	0
+130	93	<epsilon>	0	0
+130	100
+93	131	boing	140	0
+131	93	<epsilon>	0	0
+131	100
+93	132	boot	141	0
+132	93	<epsilon>	0	0
+132	100
+93	133	reboot	142	0
+133	93	<epsilon>	0	0
+133	100
+93	134	border	143	0
+134	135	of	0	0
+135	93	<epsilon>	0	0
+135	100
+93	136	body	144	0
+136	93	<epsilon>	0	0
+136	100
+93	137	body	145	0
+137	93	<epsilon>	0	0
+137	100
+93	138	being	146	0
+138	93	<epsilon>	0	0
+138	100
+93	139	begun	147	0
+139	93	<epsilon>	0	0
+139	100
+93	140	begin	148	0
+140	93	<epsilon>	0	0
+140	100
+93	141	win	149	0
+141	93	<epsilon>	0	0
+141	100
+93	142	border	150	0
+142	143	of	0	0
+143	93	<epsilon>	0	0
+143	100
+93	144	being	151	0
+144	93	<epsilon>	0	0
+144	100
+93	145	actions	152	0
+145	93	<epsilon>	0	0
+145	100
+93	146	search	153	0
+146	93	<epsilon>	0	0
+146	100
+93	147	the	154	0
+147	93	<epsilon>	0	0
+147	100
+93	148	answer	155	0
+148	93	<epsilon>	0	0
+148	100
+93	149	of	156	0
+149	93	<epsilon>	0	0
+149	100
+93	150	the	157	0
+150	93	<epsilon>	0	0
+150	100
+93	151	there	158	0
+151	152	is	0	0
+152	93	<epsilon>	0	0
+152	100
+93	153	there	159	0
+153	154	is	0	0
+154	93	<epsilon>	0	0
+154	100
+93	155	something	160	0
+155	93	<epsilon>	0	0
+155	100
+93	156	body	161	0
+156	93	<epsilon>	0	0
+156	100
+93	157	password	162	0
+157	93	<epsilon>	0	0
+157	100
+93	158	pancreas	163	0
+158	93	<epsilon>	0	0
+158	100
+93	159	give	164	0
+159	93	<epsilon>	0	0
+159	100
+93	160	me	165	0
+160	93	<epsilon>	0	0
+160	100
+93	161	data	166	0
+161	93	<epsilon>	0	0
+161	100
+93	162	give	167	0
+162	93	<epsilon>	0	0
+162	100
+93	163	me	168	0
+163	93	<epsilon>	0	0
+163	100
+93	164	give	169	0
+164	93	<epsilon>	0	0
+164	100
+93	165	me	170	0
+165	93	<epsilon>	0	0
+165	100
+93	166	you	171	0
+166	93	<epsilon>	0	0
+166	100
+93	167	your	172	0
+167	93	<epsilon>	0	0
+167	100
+93	168	give	173	0
+168	93	<epsilon>	0	0
+168	100
+93	169	me	174	0
+169	93	<epsilon>	0	0
+169	100
+93	170	your	175	0
+170	93	<epsilon>	0	0
+170	100
+93	171	data	176	0
+171	93	<epsilon>	0	0
+171	100
+93	172	give	177	0
+172	93	<epsilon>	0	0
+172	100
+93	173	me	178	0
+173	93	<epsilon>	0	0
+173	100
+93	174	your	179	0
+174	93	<epsilon>	0	0
+174	100
+93	175	give	180	0
+175	93	<epsilon>	0	0
+175	100
+93	176	me	181	0
+176	93	<epsilon>	0	0
+176	100
+93	177	your	182	0
+177	93	<epsilon>	0	0
+177	100
+93	178	data	183	0
+178	93	<epsilon>	0	0
+178	100
+93	179	begun	184	0
+179	93	<epsilon>	0	0
+179	100
+93	180	begin	185	0
+180	93	<epsilon>	0	0
+180	100
+93	181	wins	186	0
+181	93	<epsilon>	0	0
+181	100
+93	182	blood	187	0
+182	93	<epsilon>	0	0
+182	100
+93	183	everywhere	188	0
+183	93	<epsilon>	0	0
+183	100
+93	184	give	189	0
+184	93	<epsilon>	0	0
+184	100
+93	185	give	190	0
+185	93	<epsilon>	0	0
+185	100
+93	186	your	191	0
+186	93	<epsilon>	0	0
+186	100
+93	187	blood	192	0
+187	93	<epsilon>	0	0
+187	100
+93	188	golden	193	0
+188	93	<epsilon>	0	0
+188	100
+93	189	golden	194	0
+189	93	<epsilon>	0	0
+189	100
+93	190	data	195	0
+190	93	<epsilon>	0	0
+190	100
+93	191	protein	196	0
+191	192	protection	0	0
+192	193	amino-acid	0	0
+193	93	<epsilon>	0	0
+193	100
+93	194	where	197	0
+194	93	<epsilon>	0	0
+194	100
+93	195	bilar	198	0
+195	93	<epsilon>	0	0
+195	100
+93	196	violence	199	0
+196	197	segment	0	0
+197	93	<epsilon>	0	0
+197	100
+93	198	segregation	200	0
+198	93	<epsilon>	0	0
+198	100
+93	199	memory	201	0
+199	93	<epsilon>	0	0
+199	100
+93	200	encode	202	0
+200	93	<epsilon>	0	0
+200	100
+93	201	where	203	0
+201	93	<epsilon>	0	0
+201	100
+93	202	where	204	0
+202	93	<epsilon>	0	0
+202	100
+93	203	body	205	0
+203	93	<epsilon>	0	0
+203	100
+93	204	where	206	0
+204	93	<epsilon>	0	0
+204	100
+93	205	body	207	0
+205	93	<epsilon>	0	0
+205	100
+93	206	where	208	0
+206	93	<epsilon>	0	0
+206	100
+93	207	where	209	0
+207	93	<epsilon>	0	0
+207	100
+93	208	give	210	0
+208	93	<epsilon>	0	0
+208	100
+93	209	data	211	0
+209	93	<epsilon>	0	0
+209	100
+93	210	clouds	212	0
+210	93	<epsilon>	0	0
+210	100
+93	211	something	213	0
+211	93	<epsilon>	0	0
+211	100
+93	212	being	214	0
+212	93	<epsilon>	0	0
+212	100
+93	213	where	215	0
+213	93	<epsilon>	0	0
+213	100
+93	214	where	216	0
+214	93	<epsilon>	0	0
+214	100
+93	215	give	217	0
+215	93	<epsilon>	0	0
+215	100
+93	216	data	218	0
+216	93	<epsilon>	0	0
+216	100
+93	217	functions	219	0
+217	93	<epsilon>	0	0
+217	100
+93	218	and	220	0
+218	219	to	0	0
+219	220	want	0	0
+220	93	<epsilon>	0	0
+93	221	give	221	0
+221	222	me	0	0
+222	223	my	0	0
+223	224	data	0	0
+224	93	<epsilon>	0	0
+220	225	<epsilon>	222	0
+224	225	<epsilon>	222	0
+225
+225	226	y	223	0
+226	227	querer	0	0
+227	225	<epsilon>	0	0
+225	228	give	224	0
+228	229	me	0	0
+229	230	my	0	0
+230	231	data	0	0
+231	225	<epsilon>	0	0
+227	232	<epsilon>	225	0
+231	232	<epsilon>	225	0
+232
+232	233	give	226	0
+233	234	me	0	0
+234	235	my	0	0
+235	236	data	0	0
+236	232	<epsilon>	0	0
+236	237	<epsilon>	227	0
+237	238	<epsilon>	228	0
+238
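The section automata added by this patch use the AT&T/OpenFST text format that slu/src/compile_automate.sh (further down in this patch) feeds to fstcompile: each arc line is tab-separated as source state, destination state, input word, output action id, weight; lines with one or two fields mark final states (state plus an optional final weight), and <epsilon> arcs consume no word. A minimal sketch of that layout, for orientation only (read_fst_text is illustrative, not part of the repo; the path points at one of the files added below):

def read_fst_text(path):
    # Split an OpenFST-style text file into arc lines and final-state lines.
    arcs, finals = [], []
    with open(path) as f:
        for line in f:
            fields = line.rstrip('\n').split('\t')
            if len(fields) >= 3:
                arcs.append(fields)    # source, destination, word, action id, weight
            elif fields[0]:
                finals.append(fields)  # final state, optional final weight
    return arcs, finals

arcs, finals = read_fst_text('slu/automate/homeostasis_25nov_section8.txt')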
diff --git a/slu/automate/homeostasis_25nov_section6_text.fst b/slu/automate/homeostasis_25nov_section6_text.fst
new file mode 100644
index 0000000000000000000000000000000000000000..7b58d68d3e3f207430f9021f5e56425bd5e862eb
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section6_text.fst differ
diff --git a/slu/automate/homeostasis_25nov_section6_text.txt b/slu/automate/homeostasis_25nov_section6_text.txt
new file mode 100644
index 0000000000000000000000000000000000000000..19a83f94c8907d9d21acb8e1cd046b4d074fff07
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section6_text.txt
@@ -0,0 +1,233 @@
+0	1	open
+1	2	network
+2	3	clouds
+3	4	beautiful
+4	5	data
+5	6	clouds
+6	7	rain
+7	8	of
+8	9	identities
+9	10	storm
+10	11	of
+11	12	possibilities
+12	13	body
+13	14	wifi
+14	15	search
+15	16	and
+16	17	redo
+17	18	connection
+18	19	connect
+19	20	me
+20	21	to
+21	22	this
+22	23	network
+23	24	function
+24	25	looking
+25	26	for
+26	27	something
+27	28	must
+28	29	stay
+29	30	something
+30	31	must
+31	32	exist
+32	33	a
+33	34	thread
+34	35	something
+35	36	exists
+36	37	between
+37	38	recollection
+38	39	and
+39	40	oblivion
+40	41	a
+41	42	tension
+42	43	an
+43	44	echo
+44	45	an
+45	46	emptiness
+46	47	something
+47	48	rare
+48	49	stays
+49	50	through
+50	51	nothing
+51	52	more
+52	53	strange
+53	54	than
+54	55	this
+55	56	exile
+56	57	an
+57	58	absolute
+58	59	abyss
+59	60	a
+60	61	creaking
+61	62	of
+62	63	the
+63	64	bones
+64	65	a
+65	66	barbarian
+66	67	invasion
+67	68	the
+68	69	carelessness
+69	70	of
+70	71	destinies
+71	72	wringing
+72	73	out
+73	74	the
+74	75	blood
+75	76	something
+76	77	should
+77	78	must
+78	79	or
+79	80	must
+80	81	not
+81	82	something
+82	83	stays
+83	84	try
+84	85	with
+85	86	functional
+86	87	regulation
+87	88	give
+88	89	me
+89	90	my
+90	91	data
+91	92	possible
+92	93	function
+93	94	erase
+94	95	the
+95	96	space
+96	97	important
+97	98	state
+98	99	geolocalization
+99	100	important
+100	101	encode
+101	102	passion
+102	103	for
+103	104	located
+104	105	constellation
+105	106	center
+106	107	six
+107	108	six
+108	109	nine
+109	110	important
+110	111	some
+111	112	threads
+112	113	are
+113	114	broken
+114	115	remove
+115	116	memory
+116	117	love
+117	118	love
+118	119	memory
+119	120	love
+120	121	errors
+121	122	a
+122	123	identical
+123	124	identity
+124	125	identifier
+125	126	where
+126	127	answer
+127	128	being
+128	129	boing
+129	130	boot
+130	131	reboot
+131	132	border
+132	133	of
+133	134	body
+134	135	body
+135	136	being
+136	137	begun
+137	138	begin
+138	139	win
+139	140	border
+140	141	of
+141	142	being
+142	143	actions
+143	144	search
+144	145	the
+145	146	answer
+146	147	of
+147	148	the
+148	149	there
+149	150	is
+150	151	there
+151	152	is
+152	153	something
+153	154	body
+154	155	password
+155	156	pancreas
+156	157	give
+157	158	me
+158	159	data
+159	160	give
+160	161	me
+161	162	give
+162	163	me
+163	164	you
+164	165	your
+165	166	give
+166	167	me
+167	168	your
+168	169	data
+169	170	give
+170	171	me
+171	172	your
+172	173	give
+173	174	me
+174	175	your
+175	176	data
+176	177	begun
+177	178	begin
+178	179	wins
+179	180	blood
+180	181	everywhere
+181	182	give
+182	183	give
+183	184	your
+184	185	blood
+185	186	golden
+186	187	golden
+187	188	data
+188	189	protein
+189	190	protection
+190	191	amino-acid
+191	192	where
+192	193	bilar
+193	194	violence
+194	195	segment
+195	196	segregation
+196	197	memory
+197	198	encode
+198	199	where
+199	200	where
+200	201	body
+201	202	where
+202	203	body
+203	204	where
+204	205	where
+205	206	give
+206	207	data
+207	208	clouds
+208	209	something
+209	210	being
+210	211	where
+211	212	where
+212	213	give
+213	214	data
+214	215	functions
+215	216	and
+216	217	to
+217	218	want
+218	219	give
+219	220	me
+220	221	my
+221	222	data
+222	223	y
+223	224	querer
+224	225	give
+225	226	me
+226	227	my
+227	228	data
+228	229	give
+229	230	me
+230	231	my
+231	232	data
+232
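The *_text.txt files appear to be plain linear acceptors over each section's word sequence; compile_automate.sh (below) compiles them with fstcompile --acceptor and composes them with the corresponding section FST as a consistency check. As a rough, purely illustrative sketch, joining the third column of the file above reconstructs the section text:

with open('slu/automate/homeostasis_25nov_section6_text.txt') as f:
    words = [fields[2] for fields in (line.strip().split('\t') for line in f)
             if len(fields) >= 3]
print(' '.join(words))  # "open network clouds beautiful data clouds rain of identities ..."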
diff --git a/slu/automate/homeostasis_25nov_section7.txt b/slu/automate/homeostasis_25nov_section7.txt
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/slu/automate/homeostasis_25nov_section7_text.txt b/slu/automate/homeostasis_25nov_section7_text.txt
new file mode 100644
index 0000000000000000000000000000000000000000..573541ac9702dd3969c9bc859d2b91ec1f7e6e56
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section7_text.txt
@@ -0,0 +1 @@
+0
diff --git a/slu/automate/homeostasis_25nov_section8.fst b/slu/automate/homeostasis_25nov_section8.fst
new file mode 100644
index 0000000000000000000000000000000000000000..bd11b3868f5cbac32832fae3663c0521d7e1fbc9
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section8.fst differ
diff --git a/slu/automate/homeostasis_25nov_section8.txt b/slu/automate/homeostasis_25nov_section8.txt
new file mode 100644
index 0000000000000000000000000000000000000000..d8bc572fd971e99a8c4c2d02605d3e1d0750dd0a
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section8.txt
@@ -0,0 +1,32 @@
+0	1	search	229	0
+1	2	for	0	0
+2	3	sequences	0	0
+3	4	producing	0	0
+4	5	significant	0	0
+5	6	alignments	0	0
+6	7	in	0	0
+7	8	genetic	0	0
+8	9	database	0	0
+0	9	<joker>	229	100
+9	10	give	230	0
+10	11	me	0	0
+11	12	my	0	0
+12	13	data	0	0
+9	13	<joker>	230	100
+9	14	<epsilon>	231	0
+13	14	<epsilon>	231	0
+14
+14	15	record	232	0
+15	14	<epsilon>	0	0
+15	100
+14	16	record	232	0
+16	14	<epsilon>	0	0
+16	100
+14	17	give	233	0
+17	18	me	0	0
+18	19	my	0	0
+19	20	data	0	0
+20	14	<epsilon>	0	0
+20	21	<epsilon>	234	0
+21	22	<epsilon>	235	0
+22
diff --git a/slu/automate/homeostasis_25nov_section8_text.fst b/slu/automate/homeostasis_25nov_section8_text.fst
new file mode 100644
index 0000000000000000000000000000000000000000..460ae4c9ff01d88da23532e652258119f225c10e
Binary files /dev/null and b/slu/automate/homeostasis_25nov_section8_text.fst differ
diff --git a/slu/automate/homeostasis_25nov_section8_text.txt b/slu/automate/homeostasis_25nov_section8_text.txt
new file mode 100644
index 0000000000000000000000000000000000000000..c6cb1b85b7b3fa89cbce6ce91b4d1369a9d04713
--- /dev/null
+++ b/slu/automate/homeostasis_25nov_section8_text.txt
@@ -0,0 +1,20 @@
+0	1	search
+1	2	for
+2	3	sequences
+3	4	producing
+4	5	significant
+5	6	alignments
+6	7	in
+7	8	genetic
+8	9	database
+9	10	give
+10	11	me
+11	12	my
+12	13	data
+13	14	record
+14	15	record
+15	16	give
+16	17	me
+17	18	my
+18	19	data
+19
diff --git a/slu/homeostasis_25nov.asr/all.hyp b/slu/homeostasis_25nov.asr/all.hyp
new file mode 100644
index 0000000000000000000000000000000000000000..5606e82408c1945acf214a579c7a1231cf7a6d24
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/all.hyp
@@ -0,0 +1,7 @@
+uno open system
+this dos than open technical characteristics read next yes read download open the terms and conditions of use of body x epsilon system three point zero accept terms and conditions of use next install the new version of me data gi give to
+this all database open access to body data import body data upload import organic matter data uno import temperature upload import time upload import space data upload import position upload import body subsystems upload upload to all to this open access to body data import body data upload import organic matter import temperature upload import time upload import space data upload import position upload import body subsystems upload import estate upload to to that fear to to to
+matter open access to body functions import body functions space localization upload import body functions sensations upload to be import body functions passion upload import body functions concentration upload import body functions perception upload import body functions formal force upload import body functions logics upload import body functions imagination upload import body functions effort upload import body functions nervous system upload import internal network upload or such all
+to set up the connection open access to memory import memory data open memory deactivate the security system deactivate the security system deactivate the security system of new version open conditions and terms of use of use open open conditions and terms of use modify the security conditions of new version anyway import memory this
+this the to open network rain of identities storm of body body where they fell search and redo connection connect me to this network something must stay something must exist seis the to something exists between recollection and oblivion a tension an echo and something rare stays through nothing more strange than this exile an absolute abyss to a creaking of the bones a barbarian invasion the carelessness of destinies wringing out the blood data something should must or must not something stays try with functional regulation a like possible function display inline erase the space important say say geolocalization import to encode passion for located constellation center six six nine nine important some threads are broken remove memory love memory love errors a dead to identity than to fight is where and where being boing boot reboot border of body body being begun begin being win border of being memory actions read search the answer is this there is there is something bo boom body password me the pa pathetic pancreas give give to give to me to me to memo me erase memory seis memo rise say say hello data give this mineral give me you your give me your data give me give me yours give me yours give data begin win wins key wins wine blood where where give me give me your data upload go go golden data me to go pro protein protection aminoacid to go where where is the be that violence violence segment segregation memory encode where where is everybody where where is nobody where but feet feet free freedom give me go go freedom give your give me go nine relead the going fast clouds of something else than the center of the center of being the place of the place where the end starts contact skin left arm conductor nerve sensor supplementary information like sex name date of place where you live give me your data access accede the door is opened muscular oxygen try with functions in realtime speak with someone from another space in realtime to consult a realtime information climate the exact state of such or such a street to buy to choose to access to of information of the real world to have virtual sex with sex with that that sex with a child a chicken a dead terrorist a screen other functions function analyze function capacity capacity capacity to modify to reinventing inner information analyzes of the capacity of modification of the six street to be logic the information it's the connection it's the link the bond it is the access the a the code try with utopian function how to say that which the nothingness understands that which fear hides in the promises of the future in the frustration of the human in the disappointment of humanity to envy the cadence of machines their speed their power the infinite of the fear the utopia of to close the simplicity of the cables the capacity of calculation the efficiency of the systems and to want rise say looking another an image looking neither skin left promises that which the nothingness the systems and one a sea and element that envy the record stop erase mark an absolute a sea the to but everything the infinite of the virtual uno to be errors possibilities the simplicity of the carelessness upload set that i feed center the systems give to try with upper motoneuron with upper motoneuron functions functions code variants nerve motoneuron con con con connect me to this network i'm just looking for the connection a connection a link something that links me to something something that tells me that i am not an isolated element that i am not dead to generate 
a new memory to store it where i want to store it to visit it when i want to visit it it isolated points in the space drops unable to be a sea grains of sand that do not conceive of the desert leaves that do not remember the tree from where they fell i feed on an electrical juice on an image that i generate and i can change with my fingers searching fingers search be shared results space where there are neither limits nor angles where there are terms is nothing else a sea images from a space more real than reality to to the i try with identity function accept infinite spaces identifier required something that identify me like i identity is a movement random combination temporal accident ephemeral system to replace identity to identifier like liquid identities composed by codes codes codes codes and some private that that like geolocalization traced actions autoproduced image try with  existential functions to be in the identities from in the matter parallel worlds interconnected parallel worlds interconnected to be downloaded to be installed to be executed in the application of being to be synchronized to be shared to mark an alien silence to remain in the space of between to fight in the body to transcend the effect the fault the default in effect to to give to give to give to give to give to give to to go for a walk in the internal desert to return to the center to scratch the bones to join the form to relead deviations of the brain to integrate the knot to undress the center to be relocated in space and time to be the extrinsic silence to undress time to relead desert to close the access to dance information in movement the movement of information the access code the code the access to movement the body the fear the recollection the naked center the paralysis of the fear replace the fear code the code of the abyss to be downloaded to be installed to be executed in the application of being to be synchronized to be shared to be try with cognitive function transparent margins elastic borders perpetual dissociation between container and content if everything if in the magnetic center of nothingness abstract intimacy of the immaterial universe try with structural regulation open body wifi connection no data detected link memory function locate locate data storage cells or shining electronics and very well organized cables fragile cells or machines motivated by a flow another of an electrical blood composed of center one a binary logic everything everything can't logic everything can't be binary but everything what has happened and what happens it's all written down somewhere to try with morphogenetics auto regulation activation of nervous system link nervous system to cells heard genetics data bank send emergency message to send emergency message to open body data bank open cells code open genetic code
+in the code ocho search for sequences producing significant alignments in genetic database record read go save this memory in genetic database next stop to save this memory in genetic database next to to to save this memory in genetic database next erase this memory next record that uno erase this memory next record center to to save this memory in genetic database to be
diff --git a/slu/homeostasis_25nov.asr/all.ref b/slu/homeostasis_25nov.asr/all.ref
new file mode 100644
index 0000000000000000000000000000000000000000..64686e1479e9626b2083b857160888bbcada580d
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/all.ref
@@ -0,0 +1,7 @@
+uno open system
+dos tell me open technical characteristics read next yes read download open the terms and conditions of use of body x epsilon system three point zero accept terms and conditions of use next install the new version of me
+tres tres open access to body data import body data upload import organic matter data upload import temperature upload import time upload import space data upload import position upload import body subsystems upload import estate upload tres open access to body data import body data upload import organic matter data upload import temperature upload import time upload import space data upload import position upload import body subsystems upload import estate upload
+quatro open access to body functions import body functions space localization upload import body functions sensations upload import body functions passion upload import body functions concentration upload import body functions perception upload import body functions formal force upload import body functions logics upload import body functions imagination upload import body functions effort upload import body functions nervous system upload import body functions internal network upload
+set up the connection open access to memory import memory data open memory deactivate the security system deactivate the security system deactivate the security system of new version open conditions and terms of use open conditions and terms of use modify the security conditions of new version anyway import memory
+seis open network clouds beautiful data clouds rain of identities storm of possibilities body wifi search and redo connection connect me to this network function looking for something must stay something must exist a thread something exists between recollection and oblivion a tension an echo an emptiness an emptiness something rare stays through nothing more strange than this exile an absolute abyss a creaking of the bones a barbarian invasion the carelessness of destinies wringing out the blood something should must or must not something stays try with functional regulation a like possible function display inline erase the space important state geolocalization important encode passion for located constellation center six six nine important some threads are broken remove memory love love memory love errors a identical identity identifier where answer being boing boot reboot border of body body being begun begin win border of being actions search the answer of the there is there is something bo boom body password the pa pa pathetic pancreas give to drive drive to me to memo rise say say hello data gi give me the this mine mineral give me you your give me your data give me yours give me your data begun begin wins wine blood everywhere give give your blood or or go golden golden data me to go pro protein protection aminoacid to go where is this is bilar violence segment segregation memory encode where where is everybody where is nobody is where but feet free freedom give me go go data going fast clouds of something else than be center in the center of being the place of the space where the end starts contact skin left arm conductor nerve sensor supplementary information like sex name date of birth place where you live give me your data access accede the door is opened muscular oxygen try with functions in realtime speak with someone from another space in realtime to consult a realtime information climate the exact state of such or such a street to buy to choose to access to all the information of the real world to have virtual sex with sex with data with a child a chicken a dead terrorist a screen other functions function analyze function capacity capacity capacity to modify to reinventing inner information analyzes of the capacity of modification of basic structures the problem is not the information it's the connection it's the link the bond it is the access it is the key it is the code try with utopian function how to say that which the nothingness understands that which fear hides in the promises of the future in the frustration of the human in the disappointment of humanity to envy the cadence of machines their speed their power the infinite of the virtual the utopia of the possibilities the simplicity of the cables the capacity of calculation the efficiency of the systems and to want  como decir lo que la nada entiende lo que el miedo esconde en las promesas del futuro en la frustracion de lo humano en la decepcion de la humanidad envidiar la cadencia de las maquinas su velocidad  su potencia lo infinito de lo virtual la utopia de los possibles la simplicidad de los cables la capacidad de calculo la eficacia de los sistemas y querer  try with upper motoneuron functions functions code variants nerve motoneuron con con con connect me to this network i'm just looking for the connection a connection a link something that links me to something something that tells me that i am not an isolated element that i am not dead to generate a new memory to store it where i want to store it to visit 
it when i want to visit it isolated points in space drops unable to be a sea grains of sand that do not conceive of the desert leaves that do not remember the tree from where they fell i feed on an electrical juice on an image that i generate and that i can change with my fingers searching fingers research fingers search results space where there are neither limits nor angles where the horizon is nothing else than bits images from a space more real than reality try with identity function accede to the infinite spaces identifier required something that identify me like i identity is a movement random combination temporal accident ephemeral system to replace identity to identifier liquid identities composed by codes codes codes codes and some private data like geolocalization traced actions autoproduced images try with  existential functions to be innumerable parallel identities from innumerable parallel worlds interconnected parallel worlds interconnected parallel identities to be downloaded to be installed to be executed in the application of being to be synchronized to be shared to mark an alien silence to remain in the space of between to fight in the body to transcend the effect the fault the default in effect to effect to activate to create to give to give to give to give to give to give to give to go for a walk in the internal desert to return to the center to scratch the bones to join the form to relead deviations of the brain to integrate the knot to undress the center to be relocated in space and time to be the extrinsic silence to undress time to relead the desert to close the access to dance information in movement the movement of information the access code the code the access to movement the body the fear the recollection the naked center the paralysis of the fear to replace the fear code the code of the abyss to be downloaded to be installed to be executed in the application of being to be synchronized to be shared to be try with cognitive function transparent margins elastic borders perpetual dissociation between container and content if everything if if is possible in the magnetic center of nothingness abstract intimacy of the immaterial universe try with structural regulation open body wifi connection no data detected link memory function locate locate data storage cells or shining electronics and very well organized cables fragile cells or machines motivated by a flow of an electrical blood composed of zero and one a binary logic everything everything can't be logic everything can't be binary but everything what has happened and what happens it's all written down somewhere try with morphogenetics auto regulation activation of nervous system link nervous system to cells heard genetics data bank send emergency message to send emergency message to open body data bank open cells code open genetic code
+ocho search for sequences producing significant alignments in genetic database record record record stop save this memory in genetic database next record stop save this memory in genetic database next record stop save this memory in genetic database next record stop erase this memory next record stop erase this memory next record stop save this memory in genetic database
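The all.hyp / all.ref pair (and the per-section files that follow) line up recognizer output with reference transcripts, one section per line; nothing in this patch scores them. If word error rates are wanted, a minimal sketch along these lines should do, assuming whitespace tokenization (wer and the loop are illustrative, not part of the repo):

def wer(ref, hyp):
    # Word-level Levenshtein distance, normalized by the reference length.
    r, h = ref.split(), hyp.split()
    d = [[0] * (len(h) + 1) for _ in range(len(r) + 1)]
    for i in range(len(r) + 1):
        d[i][0] = i
    for j in range(len(h) + 1):
        d[0][j] = j
    for i in range(1, len(r) + 1):
        for j in range(1, len(h) + 1):
            cost = 0 if r[i - 1] == h[j - 1] else 1
            d[i][j] = min(d[i - 1][j] + 1, d[i][j - 1] + 1, d[i - 1][j - 1] + cost)
    return float(d[len(r)][len(h)]) / max(len(r), 1)

with open('slu/homeostasis_25nov.asr/all.ref') as fr, open('slu/homeostasis_25nov.asr/all.hyp') as fh:
    for i, (ref, hyp) in enumerate(zip(fr, fh), 1):
        print('section %d WER: %.2f' % (i, wer(ref, hyp)))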
diff --git a/slu/homeostasis_25nov.asr/sect1.hyp b/slu/homeostasis_25nov.asr/sect1.hyp
new file mode 100644
index 0000000000000000000000000000000000000000..93f478e9d6c2a676ff595c1f70e74d4478797f7b
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect1.hyp
@@ -0,0 +1 @@
+uno open system
diff --git a/slu/homeostasis_25nov.asr/sect1.ref b/slu/homeostasis_25nov.asr/sect1.ref
new file mode 100644
index 0000000000000000000000000000000000000000..93f478e9d6c2a676ff595c1f70e74d4478797f7b
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect1.ref
@@ -0,0 +1 @@
+uno open system
diff --git a/slu/homeostasis_25nov.asr/sect2.hyp b/slu/homeostasis_25nov.asr/sect2.hyp
new file mode 100644
index 0000000000000000000000000000000000000000..31333e2b329035c2110a47d350f8d386095a2003
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect2.hyp
@@ -0,0 +1 @@
+this dos than open technical characteristics read next yes read download open the terms and conditions of use of body x epsilon system three point zero accept terms and conditions of use next install the new version of me data gi give me to data
diff --git a/slu/homeostasis_25nov.asr/sect2.ref b/slu/homeostasis_25nov.asr/sect2.ref
new file mode 100644
index 0000000000000000000000000000000000000000..76746243b990f0c8f31ed59c9b31fd0305d823bd
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect2.ref
@@ -0,0 +1 @@
+dos tell me open technical characteristics read next yes read download open the terms and conditions of use of body x epsilon system three point zero accept terms and conditions of use next install the new version of me
diff --git a/slu/homeostasis_25nov.asr/sect3.hyp b/slu/homeostasis_25nov.asr/sect3.hyp
new file mode 100644
index 0000000000000000000000000000000000000000..e6c467d68b268ce0595b158f8bac4b0e744aaa03
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect3.hyp
@@ -0,0 +1 @@
+this all database open access to body data import body data upload import organic matter data uno import temperature upload import time upload import space data upload import position upload import body subsystems upload upload to all to this open access to body data import body data upload import organic matter import temperature upload import time upload import space data upload import position upload import body subsystems upload import estate upload to to that fear to to to
diff --git a/slu/homeostasis_25nov.asr/sect3.ref b/slu/homeostasis_25nov.asr/sect3.ref
new file mode 100644
index 0000000000000000000000000000000000000000..5f85f6255ab257e04418563e0c8d291cfef01205
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect3.ref
@@ -0,0 +1 @@
+tres tres open access to body data import body data upload import organic matter data upload import temperature upload import time upload import space data upload import position upload import body subsystems upload import estate upload tres open access to body data import body data upload import organic matter data upload import temperature upload import time upload import space data upload import position upload import body subsystems upload import estate upload
diff --git a/slu/homeostasis_25nov.asr/sect4.hyp b/slu/homeostasis_25nov.asr/sect4.hyp
new file mode 100644
index 0000000000000000000000000000000000000000..44332cda486b2f3441b279e3937bd670b48e7d0e
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect4.hyp
@@ -0,0 +1 @@
+matter open access to body functions import body functions space localization upload import body functions sensations upload to be import body functions passion upload import body functions concentration upload import body functions perception upload import body functions formal force upload import body functions logics upload import body functions imagination upload import body functions effort upload import body functions nervous system upload import internal network upload or such all
diff --git a/slu/homeostasis_25nov.asr/sect4.ref b/slu/homeostasis_25nov.asr/sect4.ref
new file mode 100644
index 0000000000000000000000000000000000000000..ef96a8cb33561c4ae42ad1cb915b421c943d3585
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect4.ref
@@ -0,0 +1 @@
+quatro open access to body functions import body functions space localization upload import body functions sensations upload import body functions passion upload import body functions concentration upload import body functions perception upload import body functions formal force upload import body functions logics upload import body functions imagination upload import body functions effort upload import body functions nervous system upload import body functions internal network upload
diff --git a/slu/homeostasis_25nov.asr/sect5.hyp b/slu/homeostasis_25nov.asr/sect5.hyp
new file mode 100644
index 0000000000000000000000000000000000000000..d1a160e6355a7d67183b02d28617fc9c06afd12f
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect5.hyp
@@ -0,0 +1 @@
+to set up the connection open access to memory import memory data open memory deactivate the security system deactivate the security system deactivate the security system of new version open conditions and terms of use of use open open conditions and terms of use modify the security conditions of new version anyway import memory this
diff --git a/slu/homeostasis_25nov.asr/sect5.ref b/slu/homeostasis_25nov.asr/sect5.ref
new file mode 100644
index 0000000000000000000000000000000000000000..05b467d242a225d623ef13cdf4d0173dce44a2f2
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect5.ref
@@ -0,0 +1 @@
+set up the connection open access to memory import memory data open memory deactivate the security system deactivate the security system deactivate the security system of new version open conditions and terms of use open conditions and terms of use modify the security conditions of new version anyway import memory
diff --git a/slu/homeostasis_25nov.asr/sect6.hyp b/slu/homeostasis_25nov.asr/sect6.hyp
new file mode 100644
index 0000000000000000000000000000000000000000..520affbcdfd313515ae9b0b8100beee68d99da21
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect6.hyp
@@ -0,0 +1 @@
+this the to open network rain of identities storm of body body where they fell search and redo connection connect me to this network something must stay something must exist seis the to something exists between recollection and oblivion a tension an echo and something rare stays through nothing more strange than this exile an absolute abyss to a creaking of the bones a barbarian invasion the carelessness of destinies wringing out the blood data something should must or must not something stays try with functional regulation a like possible function display inline erase the space important say say geolocalization import to encode passion for located constellation center six six nine nine important some threads are broken remove memory love memory love errors a dead to identity than to fight is where and where being boing boot reboot border of body body being begun begin being win border of being memory actions read search the answer is this there is there is something bo boom body password me the pa pathetic pancreas give give to give to me to me to memo me erase memory seis memo rise say say hello data give this mineral give me you your give me your data give me give me yours give me yours give data begin win wins key wins wine blood where where give me give me your data upload go go golden data me to go pro protein protection aminoacid to go where where is the be that violence violence segment segregation memory encode where where is everybody where where is nobody where but feet feet free freedom give me go go freedom give your give me go nine relead the going fast clouds of something else than the center of the center of being the place of the place where the end starts contact skin left arm conductor nerve sensor supplementary information like sex name date of place where you live give me your data access accede the door is opened muscular oxygen try with functions in realtime speak with someone from another space in realtime to consult a realtime information climate the exact state of such or such a street to buy to choose to access to of information of the real world to have virtual sex with sex with that that sex with a child a chicken a dead terrorist a screen other functions function analyze function capacity capacity capacity to modify to reinventing inner information analyzes of the capacity of modification of the six street to be logic the information it's the connection it's the link the bond it is the access the a the code try with utopian function how to say that which the nothingness understands that which fear hides in the promises of the future in the frustration of the human in the disappointment of humanity to envy the cadence of machines their speed their power the infinite of the fear the utopia of to close the simplicity of the cables the capacity of calculation the efficiency of the systems and to want rise say looking another an image looking neither skin left promises that which the nothingness the systems and one a sea and element that envy the record stop erase mark an absolute a sea the to but everything the infinite of the virtual uno to be errors possibilities the simplicity of the carelessness upload set that i feed center the systems give to try with upper motoneuron with upper motoneuron functions functions code variants nerve motoneuron con con con connect me to this network i'm just looking for the connection a connection a link something that links me to something something that tells me that i am not an isolated element that i am not dead to generate 
a new memory to store it where i want to store it to visit it when i want to visit it it isolated points in the space drops unable to be a sea grains of sand that do not conceive of the desert leaves that do not remember the tree from where they fell i feed on an electrical juice on an image that i generate and i can change with my fingers searching fingers search be shared results space where there are neither limits nor angles where there are terms is nothing else a sea images from a space more real than reality to to the i try with identity function accept infinite spaces identifier required something that identify me like i identity is a movement random combination temporal accident ephemeral system to replace identity to identifier like liquid identities composed by codes codes codes codes and some private that that like geolocalization traced actions autoproduced image try with  existential functions to be in the identities from in the matter parallel worlds interconnected parallel worlds interconnected to be downloaded to be installed to be executed in the application of being to be synchronized to be shared to mark an alien silence to remain in the space of between to fight in the body to transcend the effect the fault the default in effect to to give to give to give to give to give to give to to go for a walk in the internal desert to return to the center to scratch the bones to join the form to relead deviations of the brain to integrate the knot to undress the center to be relocated in space and time to be the extrinsic silence to undress time to relead desert to close the access to dance information in movement the movement of information the access code the code the access to movement the body the fear the recollection the naked center the paralysis of the fear replace the fear code the code of the abyss to be downloaded to be installed to be executed in the application of being to be synchronized to be shared to be try with cognitive function transparent margins elastic borders perpetual dissociation between container and content if everything if in the magnetic center of nothingness abstract intimacy of the immaterial universe try with structural regulation open body wifi connection no data detected link memory function locate locate data storage cells or shining electronics and very well organized cables fragile cells or machines motivated by a flow another of an electrical blood composed of center one a binary logic everything everything can't logic everything can't be binary but everything what has happened and what happens it's all written down somewhere to try with morphogenetics auto regulation activation of nervous system link nervous system to cells heard genetics data bank send emergency message to send emergency message to open body data bank open cells code open genetic code
diff --git a/slu/homeostasis_25nov.asr/sect6.ref b/slu/homeostasis_25nov.asr/sect6.ref
new file mode 100644
index 0000000000000000000000000000000000000000..f3c7c989c8c7802c6bbaa4e98bce00b8dc03c459
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect6.ref
@@ -0,0 +1 @@
+seis open network clouds beautiful data clouds rain of identities storm of possibilities body wifi search and redo connection connect me to this network function looking for something must stay something must exist a thread something exists between recollection and oblivion a tension an echo an emptiness an emptiness something rare stays through nothing more strange than this exile an absolute abyss a creaking of the bones a barbarian invasion the carelessness of destinies wringing out the blood something should must or must not something stays try with functional regulation a like possible function display inline erase the space important state geolocalization important encode passion for located constellation center six six nine important some threads are broken remove memory love love memory love errors a identical identity identifier where answer being boing boot reboot border of body body being begun begin win border of being actions search the answer of the there is there is something bo boom body password the pa pa pathetic pancreas give to drive drive to me to memo rise say say hello data gi give me the this mine mineral give me you your give me your data give me yours give me your data begun begin wins wine blood everywhere give give your blood or or go golden golden data me to go pro protein protection aminoacid to go where is this is bilar violence segment segregation memory encode where where is everybody where is nobody is where but feet free freedom give me go go data going fast clouds of something else than be center in the center of being the place of the space where the end starts contact skin left arm conductor nerve sensor supplementary information like sex name date of birth place where you live give me your data access accede the door is opened muscular oxygen try with functions in realtime speak with someone from another space in realtime to consult a realtime information climate the exact state of such or such a street to buy to choose to access to all the information of the real world to have virtual sex with sex with data with a child a chicken a dead terrorist a screen other functions function analyze function capacity capacity capacity to modify to reinventing inner information analyzes of the capacity of modification of basic structures the problem is not the information it's the connection it's the link the bond it is the access it is the key it is the code try with utopian function how to say that which the nothingness understands that which fear hides in the promises of the future in the frustration of the human in the disappointment of humanity to envy the cadence of machines their speed their power the infinite of the virtual the utopia of the possibilities the simplicity of the cables the capacity of calculation the efficiency of the systems and to want  como decir lo que la nada entiende lo que el miedo esconde en las promesas del futuro en la frustracion de lo humano en la decepcion de la humanidad envidiar la cadencia de las maquinas su velocidad  su potencia lo infinito de lo virtual la utopia de los possibles la simplicidad de los cables la capacidad de calculo la eficacia de los sistemas y querer  try with upper motoneuron functions functions code variants nerve motoneuron con con con connect me to this network i'm just looking for the connection a connection a link something that links me to something something that tells me that i am not an isolated element that i am not dead to generate a new memory to store it where i want to store it to visit 
it when i want to visit it isolated points in space drops unable to be a sea grains of sand that do not conceive of the desert leaves that do not remember the tree from where they fell i feed on an electrical juice on an image that i generate and that i can change with my fingers searching fingers research fingers search results space where there are neither limits nor angles where the horizon is nothing else than bits images from a space more real than reality try with identity function accede to the infinite spaces identifier required something that identify me like i identity is a movement random combination temporal accident ephemeral system to replace identity to identifier liquid identities composed by codes codes codes codes and some private data like geolocalization traced actions autoproduced images try with  existential functions to be innumerable parallel identities from innumerable parallel worlds interconnected parallel worlds interconnected parallel identities to be downloaded to be installed to be executed in the application of being to be synchronized to be shared to mark an alien silence to remain in the space of between to fight in the body to transcend the effect the fault the default in effect to effect to activate to create to give to give to give to give to give to give to give to go for a walk in the internal desert to return to the center to scratch the bones to join the form to relead deviations of the brain to integrate the knot to undress the center to be relocated in space and time to be the extrinsic silence to undress time to relead the desert to close the access to dance information in movement the movement of information the access code the code the access to movement the body the fear the recollection the naked center the paralysis of the fear to replace the fear code the code of the abyss to be downloaded to be installed to be executed in the application of being to be synchronized to be shared to be try with cognitive function transparent margins elastic borders perpetual dissociation between container and content if everything if if is possible in the magnetic center of nothingness abstract intimacy of the immaterial universe try with structural regulation open body wifi connection no data detected link memory function locate locate data storage cells or shining electronics and very well organized cables fragile cells or machines motivated by a flow of an electrical blood composed of zero and one a binary logic everything everything can't be logic everything can't be binary but everything what has happened and what happens it's all written down somewhere try with morphogenetics auto regulation activation of nervous system link nervous system to cells heard genetics data bank send emergency message to send emergency message to open body data bank open cells code open genetic code
diff --git a/slu/homeostasis_25nov.asr/sect8.hyp b/slu/homeostasis_25nov.asr/sect8.hyp
new file mode 100644
index 0000000000000000000000000000000000000000..203cc81ea479524bb3fe85bdb7f961fb2023ac56
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect8.hyp
@@ -0,0 +1 @@
+in the code ocho search for sequences producing significant alignments in genetic database record read go save this memory in genetic database next stop to save this memory in genetic database next to to to save this memory in genetic database next erase this memory next record that uno erase this memory next record center to to save this memory in genetic database to be
diff --git a/slu/homeostasis_25nov.asr/sect8.ref b/slu/homeostasis_25nov.asr/sect8.ref
new file mode 100644
index 0000000000000000000000000000000000000000..aa0b9388d4d8f2e10b380878dc2c7b2f569b206a
--- /dev/null
+++ b/slu/homeostasis_25nov.asr/sect8.ref
@@ -0,0 +1 @@
+ocho search for sequences producing significant alignments in genetic database record record record stop save this memory in genetic database next record stop save this memory in genetic database next record stop save this memory in genetic database next record stop erase this memory next record stop erase this memory next record stop save this memory in genetic database
diff --git a/slu/runall.sh b/slu/runall.sh
new file mode 100755
index 0000000000000000000000000000000000000000..7bba94d6cf356e1efc6050f80167e789f999e51e
--- /dev/null
+++ b/slu/runall.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+set -o pipefail -e -u
+
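+# Two steps: (re)build the SLU models from the show's XML (currently commented
+# out below), then run rocio_slu over the whole-show reference transcript with
+# the word/action dictionaries and the compiled FSTs.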
+export LD_LIBRARY_PATH=${LD_LIBRARY_PATH:-}:./src
+
+version="homeostasis_25nov"
+prefix="automate_2015-02-22/"$version
+
+echo "compilation des modeles"
+#cat $version.xml | ./src_2015-02-22/process_xml_for_slu -prefix $prefix
+#./src_2015-02-22/compile_automate.sh $prefix
+
+echo "test sur l'ensemble du spectacle"
+cat $version.asr_2015-02-22/all.ref | ./src/rocio_slu -word "$prefix"_dico_word.txt -action "$prefix"_dico_action.txt -fstmodel "$prefix".fst -fstclean "$prefix"_clean_tail.fst
+  
diff --git a/slu/src/Makefile b/slu/src/Makefile
new file mode 100644
index 0000000000000000000000000000000000000000..c18a021679ff90a550342fbc2bac1e6cb0c596ef
--- /dev/null
+++ b/slu/src/Makefile
@@ -0,0 +1,24 @@
+# Makefile
+#
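+# Builds the SLU tools: rocio_slu, process_xml_for_slu and librocio_slu.so.
+# The -I/-L entries in CPPFLAGS point at a local OpenFST 1.3.3 build; adjust
+# them to your own OpenFST installation before running make.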
+
+CC=g++
+
+CPPFLAGS+= -I. -lfst -I/storage/raid1/homedirs/frederic.bechet/tools/openfst-1.3.3/src/include -L/storage/raid1/homedirs/frederic.bechet/tools/openfst-1.3.3/src/lib/.libs -ldl -std=c++0x -g -Wall
+
+all: rocio_slu process_xml_for_slu
+
+librocio_slu.so: librocio_slu.cc lia_liblex.o
+	$(CC) $(CPPFLAGS) -shared -o $@ -fPIC librocio_slu.cc lia_liblex.o
+
+rocio_slu: rocio_slu.cc librocio_slu.so
+	$(CC) $(CPPFLAGS) -o rocio_slu rocio_slu.cc lia_liblex.o  -L. -lrocio_slu
+
+lia_liblex.o: lia_liblex.c
+	gcc -c lia_liblex.c -g -Wall -fPIC
+
+process_xml_for_slu: process_xml_for_slu.c
+	gcc -o process_xml_for_slu process_xml_for_slu.c
+ 
+clean:
+	 rm -f process_xml_for_slu rocio_slu *.o *.so *.d
+
diff --git a/slu/src/compile_automate.sh b/slu/src/compile_automate.sh
new file mode 100755
index 0000000000000000000000000000000000000000..c6f7673d9dc8e586e8f746b3f23be484b1b9f24b
--- /dev/null
+++ b/slu/src/compile_automate.sh
@@ -0,0 +1,42 @@
+#!/bin/bash
+
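+# usage: ./compile_automate.sh <prefix>   (see runall.sh for an example prefix)
+# For every <prefix>_section<N>.txt (N = 1..79): compile it with fstcompile
+# (words as input symbols) and concatenate it into <prefix>.fst; if a matching
+# <prefix>_section<N>_text.txt exists, compile it as an acceptor and compose it
+# with the section FST, reporting the size of the composition as a sanity check.
+# Finally compile <prefix>_clean_tail.txt into <prefix>_clean_tail.fst.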
+prefix=$1
+
+WORDSYMB_I="--isymbols=""$prefix""_dico_word.txt"
+WORDSYMB_O="--osymbols=""$prefix""_dico_word.txt"
+
+rm -f "$prefix".fst
+nbsect=1
+while [ "$nbsect" != 80 ]
+do
+ if [ -e "$prefix""_section""$nbsect"".txt" ]
+ then
+  rm -f "$prefix""_section""$nbsect".fst
+  if [ `cat "$prefix""_section""$nbsect"".txt" | wc -l` != 0 ]
+  then
+   cat "$prefix""_section""$nbsect"".txt" | fstcompile $WORDSYMB_I > "$prefix""_section""$nbsect"".fst"
+  fi
+  if [ -e "$prefix""_section""$nbsect".fst ]
+  then
+   if [ -e "$prefix".fst ]
+   then
+    fstconcat "$prefix".fst "$prefix""_section""$nbsect".fst > "$prefix".fst2
+    mv "$prefix".fst2 "$prefix".fst
+   else
+    cp "$prefix""_section""$nbsect".fst "$prefix".fst
+   fi
+  fi
+  if [ -e "$prefix""_section""$nbsect""_text.txt" ] && [ -e "$prefix""_section""$nbsect".fst ]
+  then
+   fstcompile --acceptor $WORDSYMB_I $WORDSYMB_O "$prefix""_section""$nbsect""_text.txt" > "$prefix""_section""$nbsect""_text.fst"
+ 
+   # check
+   nbli=`fstcompose "$prefix""_section""$nbsect""_text.fst" "$prefix""_section""$nbsect".fst | fstprint | wc -l`
+   echo "- Check section""$nbsect"": ""$nbli"
+  fi
+ fi
+ nbsect=$(($nbsect+1))
+done
+# compile tail
+fstcompile $WORDSYMB_I $WORDSYMB_O "$prefix""_clean_tail.txt" > "$prefix""_clean_tail.fst"
+  
diff --git a/slu/src/lia_liblex.c b/slu/src/lia_liblex.c
new file mode 100644
index 0000000000000000000000000000000000000000..1d41a68627f174aee316a37d5260b16e4c6f2009
--- /dev/null
+++ b/slu/src/lia_liblex.c
@@ -0,0 +1,646 @@
+/*  Managing a lexicon with IDs  */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <strings.h>
+
+/*................................................................*/
+
+#define False	0
+#define True	1
+
+#define TailleLigne	4000
+
+#define LIA_MAX_TAILLE_MESSAGE	4000
+#define VERBOSE	0
+
+int LIA_AVL_NB_NODE;
+
+void ERREUR(const char *, const char *);
+
+/*................................................................*/
+
+
+typedef struct
+    {
+    char *key_string;
+	int code;
+    } type_info;
+
+type_info *new_type_info(char *key, int code)
+{
+type_info *pt;
+pt=(type_info *)malloc(sizeof(type_info));
+if (key) pt->key_string=strdup(key); else pt->key_string=NULL;
+pt->code=code;
+return pt;
+}
+
+void free_type_info(type_info *pt)
+{
+if (pt)
+ {
+ if (pt->key_string) free(pt->key_string);
+ free(pt);
+ }
+}
+
+void print_type_info(type_info* info, int format, FILE *file)
+{
+if (format==2) fprintf(file,"%d %s\n",info->code,info->key_string);
+else fprintf(file,"%s\t%d\n",info->key_string,info->code);
+}
+ 
+/*................................................................*/
+
+/* declaration of the AVL tree node type */
+
+typedef struct lia_avl_type
+	{
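+	/* dq = balance factor (left-subtree height minus right-subtree height) */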
+	signed char dq;
+	type_info *info;
+	struct lia_avl_type *fg,*fd;
+	} *lia_avl_t;
+
+/*................................................................*/
+
+/* rotations */
+
+/* parameters:
+	1- lia_avl_t = root of the tree */
+/* return: True=rotation performed / False=rotation not possible */
+int lia_rotation_avl_droite(lia_avl_t);
+int lia_rotation_avl_gauche(lia_avl_t);
+int lia_rotation_avl_gauche_droite(lia_avl_t);
+int lia_rotation_avl_droite_gauche(lia_avl_t);
+
+/*................................................................*/
+
+/* insertion of an element */
+
+/* parameters:
+	1- lia_avl_t = root of the tree
+	2- type_info * = info to add to the tree
+	3- int = boolean (True or False), True=insert with rebalancing
+	4- char * = character string receiving a trace of the insertion */
+/* return: lia_avl_t = root of the modified tree */
+lia_avl_t lia_ajoute_element_avl(lia_avl_t, type_info *, int, char *);
+
+/*................................................................*/
+
+/* freeing the memory used by the tree */
+
+/* parameters:
+	1- lia_avl_t = root of the tree */
+void lia_libere_avl(lia_avl_t);
+
+/*................................................................*/
+
+/* searching for an element */
+
+/* parameters:
+	1- lia_avl_t = root of the tree
+	2- type_info * = info to search for in the tree */
+/* return: the node containing the info or NULL */
+lia_avl_t lia_recherche_avl(lia_avl_t, type_info *);
+
+/*................................................................*/
+
+/* printing the tree */
+
+/* parameters:
+	1- lia_avl_t = root of the tree to print */
+void lia_affiche_avl(lia_avl_t);
+
+void lia_affiche_avl_simple(lia_avl_t ,FILE *);
+ 
+/*................................................................*/
+
+/* copy all the nodes of a tree into an array and sort them
+ * according to their frequency */
+
+/* parameters:
+ *  1- lia_avl_t = root of the tree to copy
+ *  2- int * = return value containing the size of the array */
+/* return: the address of the table containing all the nodes, sorted */
+
+lia_avl_t *lia_avl_tree2table_freq(lia_avl_t , int *);
+
+/*................................................................*/
+
+/* binary search, according to the code (or freq) on the table of nodes:
+ *  1- lia_avl_t = address of the node table (obtained with lia_avl_tree2table_freq)
+ *  2- int = size of the table (# of elements)
+ *  3- int = code or freq looked for  */
+lia_avl_t lia_avl_code2word(lia_avl_t *, int, int, int*);
+
+/*................................................................*/
+
+/*  Info  */
+/*
+typedef struct
+	{
+    char *key_string,*field;
+	} type_info;
+*/
+
+int compare_info(type_info *pt1, type_info *pt2)
+{
+return strcmp(pt1->key_string,pt2->key_string);
+}
+
+/*................................................................*/
+
+/* rotations */
+
+int lia_rotation_avl_droite(lia_avl_t pt)
+{
+lia_avl_t tmpfgfd,tmpfd;
+type_info *tmpinfo;
+char tmpdq;
+
+if ((pt==NULL)||(pt->fg==NULL)) return False; /* the rotation is not defined */
+
+/*  swap pt and fg  */
+tmpinfo=pt->info;
+tmpdq=pt->dq;
+pt->info=pt->fg->info;
+pt->dq=pt->fg->dq;
+pt->fg->info=tmpinfo;
+pt->fg->dq=tmpdq;
+
+tmpfgfd=pt->fg->fd;
+tmpfd=pt->fd;
+
+pt->fd=pt->fg;
+pt->fg=pt->fg->fg;
+pt->fd->fg=tmpfgfd;
+pt->fd->fd=tmpfd;
+
+return True;
+}
+
+int lia_rotation_avl_gauche(lia_avl_t pt)
+{
+lia_avl_t tmpfdfg,tmpfg;
+type_info *tmpinfo;
+char tmpdq;
+
+if ((pt==NULL)||(pt->fd==NULL)) return False; /* the rotation is not defined */
+
+/*  swap pt and fd  */
+tmpinfo=pt->info;
+tmpdq=pt->dq;
+pt->info=pt->fd->info;
+pt->dq=pt->fd->dq;
+pt->fd->info=tmpinfo;
+pt->fd->dq=tmpdq;
+
+tmpfdfg=pt->fd->fg;
+tmpfg=pt->fg;
+
+pt->fg=pt->fd;
+pt->fd=pt->fd->fd;
+pt->fg->fd=tmpfdfg;
+pt->fg->fg=tmpfg;
+
+return True;
+}
+
+int lia_rotation_avl_gauche_droite(lia_avl_t pt)
+{
+return ((lia_rotation_avl_gauche(pt->fg))&&(lia_rotation_avl_droite(pt)))?True:False;
+}
+
+int lia_rotation_avl_droite_gauche(lia_avl_t pt)
+{
+return ((lia_rotation_avl_droite(pt->fd))&&(lia_rotation_avl_gauche(pt)))?True:False;
+}
+
+/*................................................................*/
+
+/* creating a node */
+
+lia_avl_t new_tree_mot_node(type_info *info)
+{
+lia_avl_t pt;
+pt=(lia_avl_t)malloc(sizeof(struct lia_avl_type));
+pt->dq=0;
+pt->info=info;
+pt->fg=pt->fd=NULL;
+LIA_AVL_NB_NODE++;
+return pt;
+}
+
+/*................................................................*/
+
+/* rebalancing */
+
+int lia_reequilibre_droite(lia_avl_t racine,char *mesg,char *si_modif) /* racine->dq=+2 */
+{
+char *r_noeud;
+
+if (racine==NULL)
+ { if (VERBOSE) sprintf(mesg,"ERREUR : rotation impossible : racine==NULL"); return False; }
+
+if (racine->fg==NULL)
+ {  if (VERBOSE) sprintf(mesg,"ERREUR : rotation droite impossible : [%s]->fg==NULL",racine->info->key_string); return False; }
+
+r_noeud=racine->info->key_string;
+
+*si_modif=racine->fg->dq==0?0:1;
+
+if (racine->fg->dq>=0) /* 0 or +1 */
+ {
+ if (lia_rotation_avl_droite(racine))
+  {
+  if (VERBOSE) sprintf(mesg+strlen(mesg)," rotation droite sur le noeud [%s]",r_noeud);
+  if (racine->dq==1) racine->dq=racine->fd->dq=0; else { racine->dq=-1; racine->fd->dq=1; }
+  return True;
+  }
+ else
+  if (VERBOSE) sprintf(mesg,"ERREUR : rotation droite impossible sur le noeud [%s]",racine->info->key_string);
+ }
+else
+ {
+ if (lia_rotation_avl_gauche_droite(racine))
+  {
+  if (VERBOSE) sprintf(mesg+strlen(mesg)," rotation gauche-droite sur le noeud [%s]",r_noeud);
+  switch (racine->dq)
+   {
+   case  1 : racine->fg->dq=0; racine->fd->dq=-1; break;
+   case -1 : racine->fg->dq=1; racine->fd->dq= 0; break;
+   case  0 : racine->fg->dq=racine->fd->dq=0; break;
+   }
+  racine->dq=0;
+  return True;
+  }
+ else
+  if (VERBOSE) sprintf(mesg,"ERREUR : gauche-droite impossible sur le noeud [%s]",racine->info->key_string);
+ }
+return False;
+}
+
+int lia_reequilibre_gauche(lia_avl_t racine,char *mesg,char *si_modif) /* racine->dq=-2 */
+{
+char *r_noeud;
+
+if (racine==NULL)
+ { if (VERBOSE) sprintf(mesg,"ERREUR : rotation impossible : racine==NULL"); return False; }
+
+if (racine->fd==NULL)
+ {  if (VERBOSE) sprintf(mesg,"ERREUR : rotation gauche impossible : [%s]->fd==NULL",racine->info->key_string); return False; }
+
+r_noeud=racine->info->key_string;
+
+*si_modif=racine->fd->dq==0?0:1;
+
+if (racine->fd->dq<1) /* -1 or 0 */
+ {
+ if (lia_rotation_avl_gauche(racine))
+  {
+  if (VERBOSE) sprintf(mesg+strlen(mesg)," rotation gauche sur le noeud [%s]",r_noeud);
+  if (racine->dq==-1) racine->dq=racine->fg->dq=0; else { racine->dq=1; racine->fg->dq=-1; }
+  return True;
+  }
+ else
+  if (VERBOSE) sprintf(mesg,"ERREUR : rotation gauche impossible sur le noeud [%s]",racine->info->key_string);
+ }
+else
+ {
+ if (lia_rotation_avl_droite_gauche(racine))
+  {
+  if (VERBOSE) sprintf(mesg+strlen(mesg)," rotation droite-gauche sur le noeud [%s]",r_noeud);
+  switch (racine->dq)
+   {
+   case  1 : racine->fd->dq=-1; racine->fg->dq= 0; break;
+   case -1 : racine->fd->dq= 0; racine->fg->dq= 1; break;
+   case  0 : racine->fg->dq=racine->fd->dq=0; break;
+   }
+  racine->dq=0;
+  return True;
+  }
+ else
+  if (VERBOSE) sprintf(mesg,"ERREUR : droite-gauche impossible sur le noeud [%s]",racine->info->key_string);
+ }
+return False;
+}
+
+/*................................................................*/
+
+/* inserting an element */
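+/* Descriptive note: *si_augm is set to 1 when the height of the subtree
+   increased, so the caller can update its own dq; when avec_reequilibrage is
+   set and |dq| reaches 2, the matching rebalancing routine is applied and the
+   height increase is absorbed (*si_augm=0). */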
+
+lia_avl_t lia_insere_avl(lia_avl_t racine, type_info *info, char *si_augm,int avec_reequilibrage,char *mesg)
+{
+int comp;
+
+if (racine==NULL) { *si_augm=1; return new_tree_mot_node(info); }
+
+comp=compare_info(racine->info,info);
+
+if (comp==0)
+ {
+ /* message -> the node is already in the tree */
+ if (VERBOSE) sprintf(mesg,"noeud [%s] deja present",info->key_string);
+ *si_augm=0;
+ }
+else
+ if (comp>0)
+  { /* on the left child */
+  racine->fg=lia_insere_avl(racine->fg,info,si_augm,avec_reequilibrage,mesg);
+  if (*si_augm)
+   {
+   if (racine->dq<0) *si_augm=0;
+   racine->dq++;
+   }
+  /* possible rotation */
+  if ((avec_reequilibrage)&&(racine->dq==2))
+   { lia_reequilibre_droite(racine,mesg,si_augm); *si_augm=0; }
+  }
+ else
+  { /* on the right child */
+  racine->fd=lia_insere_avl(racine->fd,info,si_augm,avec_reequilibrage,mesg);
+  if (*si_augm)
+   {
+   if (racine->dq>0) *si_augm=0;
+   racine->dq--;
+   }
+  /* possible rotation */
+  if ((avec_reequilibrage)&&(racine->dq==-2))
+   { lia_reequilibre_gauche(racine,mesg,si_augm); *si_augm=0; }
+  }
+return racine;
+}
+
+lia_avl_t lia_ajoute_element_avl(lia_avl_t racine, type_info *info, int avec_reequilibrage, char *mesg)
+{
+char si_augm;
+if (VERBOSE) mesg[0]='\0';
+return lia_insere_avl(racine,info,&si_augm,avec_reequilibrage,mesg);
+}
+
+/*................................................................*/
+
+/* freeing the tree's memory */
+
+void lia_libere_avl(lia_avl_t racine)
+{
+if (racine)
+ {
+ lia_libere_avl(racine->fg);
+ lia_libere_avl(racine->fd);
+ free_type_info(racine->info);
+ free(racine);
+ }
+}
+
+/*................................................................*/
+
+/* searching for an element */
+
+lia_avl_t lia_recherche_avl(lia_avl_t racine, type_info *info)
+{
+int comp;
+if (racine==NULL) return NULL;
+if ((comp=compare_info(racine->info,info))==0) return racine;
+if (comp>0)  return lia_recherche_avl(racine->fg,info);
+else         return lia_recherche_avl(racine->fd,info);
+}
+
+/*................................................................*/
+
+/* printing the tree */
+
+void lia_affiche_avl_simple(lia_avl_t racine,FILE *file)
+{
+if (racine)
+ {
+ lia_affiche_avl_simple(racine->fg,file);
+ print_type_info(racine->info,1,file);
+ lia_affiche_avl_simple(racine->fd,file);
+ }
+
+}
+  
+/*................................................................*/
+
+/* copy all the nodes of a tree into an array, sorted (descending) on the
+ * code field, which also serves as a frequency counter; see the usage
+ * sketch after lia_avl_code2word below */
+
+int compare_freq(const void *a, const void *b)
+{
+lia_avl_t *c,*d;
+c=(lia_avl_t *)a;
+d=(lia_avl_t *)b;
+return ((*d)->info->code-(*c)->info->code);
+}
+
+void copy_tree2table(lia_avl_t racine, lia_avl_t *tabl, int *i)
+{
+if (racine!=NULL)
+ {
+ tabl[(*i)++]=racine;
+ copy_tree2table(racine->fg,tabl,i);
+ copy_tree2table(racine->fd,tabl,i);
+ }
+}
+
+int lia_avl_size(lia_avl_t racine)
+{
+if (racine==NULL) return 0;
+else return 1 + lia_avl_size(racine->fg) + lia_avl_size(racine->fd);
+}
+
+lia_avl_t *lia_avl_tree2table_freq(lia_avl_t racine, int *nb)
+{
+lia_avl_t *tabl;
+int i;
+*nb=lia_avl_size(racine);
+tabl=(lia_avl_t *)malloc(sizeof(lia_avl_t)*(*nb));
+i=0;
+copy_tree2table(racine,tabl,&i);
+qsort(tabl,*nb,sizeof(lia_avl_t),compare_freq);
+return tabl;
+}
+
+lia_avl_t lia_avl_code2word(lia_avl_t *tabl, int nb, int code, int *index)
+{
+struct lia_avl_type tkey;
+lia_avl_t *resu,key;
+type_info info;
+info.code=code;
+tkey.info=&info;
+key=(lia_avl_t)(&tkey);
+resu=(lia_avl_t *)bsearch(&key,tabl,nb,sizeof(lia_avl_t),compare_freq);
+if (resu) *index=resu-tabl; else *index=0;
+return resu?*resu:NULL;
+}
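+/* Usage sketch (illustration only; `racine` and the code value are placeholders):
+ *
+ *   int nb, index;
+ *   lia_avl_t *tabl = lia_avl_tree2table_freq(racine, &nb);
+ *   lia_avl_t node  = lia_avl_code2word(tabl, nb, 12, &index);
+ *   if (node) printf("%s\n", node->info->key_string);
+ *   free(tabl);
+ */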
+
+/*................................................................*/
+
+/* managing lexicon */
+
+#define MAX_LEXICON_AVL	100
+
+lia_avl_t T_avl_lexicon[MAX_LEXICON_AVL];
+lia_avl_t *T_tabl_avl_lexicon[MAX_LEXICON_AVL];
+int T_tabl_avl_lexicon_size[MAX_LEXICON_AVL];
+int T_avl_lexicon_max_code[MAX_LEXICON_AVL];
+int Nb_Avl_Lexicon=0;
+
+int load_lexicon(char *filename)
+{
+FILE *file;
+static char ch[TailleLigne],*pt;
+int code=0;
+
+if (Nb_Avl_Lexicon==MAX_LEXICON_AVL) ERREUR("cste 'MAX_LEXICON_AVL' too small","");
+T_avl_lexicon_max_code[Nb_Avl_Lexicon]=0;
+if (!(file=fopen(filename,"rt"))) ERREUR("can't open:",filename);
+for (T_avl_lexicon[Nb_Avl_Lexicon]=NULL;fgets(ch,TailleLigne,file);)
+ {
+ pt=strtok(ch," \t\n"); if (pt) pt=strtok(NULL," \t\n");
+ if (!pt) code++; else if (sscanf(pt,"%d",&code)!=1) ERREUR("bad format in:",filename);
+ if (code>T_avl_lexicon_max_code[Nb_Avl_Lexicon]) T_avl_lexicon_max_code[Nb_Avl_Lexicon]=code;
+ T_avl_lexicon[Nb_Avl_Lexicon]=lia_ajoute_element_avl(T_avl_lexicon[Nb_Avl_Lexicon],new_type_info(ch,code),True,NULL);
+ }
+T_tabl_avl_lexicon[Nb_Avl_Lexicon]=lia_avl_tree2table_freq(T_avl_lexicon[Nb_Avl_Lexicon],
+		&(T_tabl_avl_lexicon_size[Nb_Avl_Lexicon]));
+fclose(file);
+return Nb_Avl_Lexicon++;
+}
+
+int load_lexicon_inv(char *filename)
+{
+FILE *file;
+static char ch[TailleLigne],*ptword,*ptcode;
+int code=0;
+
+if (Nb_Avl_Lexicon==MAX_LEXICON_AVL) ERREUR("cste 'MAX_LEXICON_AVL' too small","");
+T_avl_lexicon_max_code[Nb_Avl_Lexicon]=0;
+if (!(file=fopen(filename,"rt"))) ERREUR("can't open:",filename);
+for (T_avl_lexicon[Nb_Avl_Lexicon]=NULL;fgets(ch,TailleLigne,file);)
+ {
+ ptcode=strtok(ch," \t\n"); if (ptcode) ptword=strtok(NULL," \t\n"); else ERREUR("bad inv (code word) format:",ch);
+ if (sscanf(ptcode,"%d",&code)!=1) ERREUR("bad format in:",filename);
+ if (code>T_avl_lexicon_max_code[Nb_Avl_Lexicon]) T_avl_lexicon_max_code[Nb_Avl_Lexicon]=code;
+ T_avl_lexicon[Nb_Avl_Lexicon]=lia_ajoute_element_avl(T_avl_lexicon[Nb_Avl_Lexicon],new_type_info(ptword,code),True,NULL);
+ }
+T_tabl_avl_lexicon[Nb_Avl_Lexicon]=lia_avl_tree2table_freq(T_avl_lexicon[Nb_Avl_Lexicon],
+		&(T_tabl_avl_lexicon_size[Nb_Avl_Lexicon]));
+fclose(file);
+return Nb_Avl_Lexicon++;
+}
+
+int new_lexicon()
+{
+if (Nb_Avl_Lexicon==MAX_LEXICON_AVL) ERREUR("cste 'MAX_LEXICON_AVL' too small","");
+T_avl_lexicon_max_code[Nb_Avl_Lexicon]=0;
+T_tabl_avl_lexicon_size[Nb_Avl_Lexicon]=0;
+T_avl_lexicon[Nb_Avl_Lexicon]=NULL;
+T_tabl_avl_lexicon[Nb_Avl_Lexicon]=NULL;
+return Nb_Avl_Lexicon++;
+}
+
+int add_word_lexicon(int lexid, char *word, int code)
+{
+if (code>T_avl_lexicon_max_code[lexid]) T_avl_lexicon_max_code[lexid]=code;
+T_avl_lexicon[lexid]=lia_ajoute_element_avl(T_avl_lexicon[lexid],new_type_info(word,code),True,NULL);
+T_tabl_avl_lexicon_size[lexid]++;
+return True;
+}
+
+void lexicon_sort_code(int lexid)
+{
+if (T_tabl_avl_lexicon[lexid]) free(T_tabl_avl_lexicon[lexid]);
+T_tabl_avl_lexicon[lexid]=lia_avl_tree2table_freq(T_avl_lexicon[lexid],&(T_tabl_avl_lexicon_size[lexid]));
+}
+
+void print_lexicon_sort_code(int lexid, int format, FILE *file)
+{
+int i;
+if (T_tabl_avl_lexicon[lexid]) free(T_tabl_avl_lexicon[lexid]);
+T_tabl_avl_lexicon[lexid]=lia_avl_tree2table_freq(T_avl_lexicon[lexid],&(T_tabl_avl_lexicon_size[lexid]));
+for(i=T_tabl_avl_lexicon_size[lexid]-1;i>=0;i--) print_type_info(T_tabl_avl_lexicon[lexid][i]->info,format,file);
+}
+
+void delete_lexicon(int lexid)
+{
+if (T_avl_lexicon[lexid]) lia_libere_avl(T_avl_lexicon[lexid]);
+if (T_tabl_avl_lexicon[lexid]) free(T_tabl_avl_lexicon[lexid]);
+T_avl_lexicon[lexid]=NULL;
+T_tabl_avl_lexicon[lexid]=NULL;
+}
+
+/*................................................................*/
+
+int code2word(int lexid, int code, char **word)
+{
+lia_avl_t resu;
+int index;
+if (code<0) return False;
+resu=lia_avl_code2word(T_tabl_avl_lexicon[lexid],T_tabl_avl_lexicon_size[lexid],code,&index);
+if (resu) *word=resu->info->key_string;
+return resu?True:False;
+}
+
+int code2index(int lexid, int code, int *index)
+{
+lia_avl_t resu;
+if (code<0) return False;
+resu=lia_avl_code2word(T_tabl_avl_lexicon[lexid],T_tabl_avl_lexicon_size[lexid],code,index);
+if ((0)&&(resu))
+ {
+ printf("code2index: code=%d index=%d\n",code,*index);
+ }
+return resu?True:False;
+}
+
+int index2code(int lexid, int index, int *code)
+{
+if ((index<0)||(index>=T_tabl_avl_lexicon_size[lexid])) return False;
+if (0) printf("index2code: index=%d code=%d\n",index,T_tabl_avl_lexicon[lexid][index]->info->code);
+*code=(T_tabl_avl_lexicon[lexid][index])->info->code;
+return True;
+}
+
+int index2word(int lexid, int index, char **word)
+{
+if ((index<0)||(index>=T_tabl_avl_lexicon_size[lexid])) return False;
+*word=(T_tabl_avl_lexicon[lexid][index])->info->key_string;
+return True;
+}
+
+int word2code(int lexid, char *word, int *code)
+{
+lia_avl_t resu;
+type_info info;
+info.key_string=word;
+resu=lia_recherche_avl(T_avl_lexicon[lexid],&info);
+if ((resu)&&(code)) *code=resu->info->code;
+return resu?True:False;
+}
+
+int addcode2word(int lexid, char *word, int code)
+{
+lia_avl_t resu;
+type_info info;
+info.key_string=word;
+resu=lia_recherche_avl(T_avl_lexicon[lexid],&info);
+if (resu) resu->info->code+=code;
+return resu?True:False;
+}
+
+int size_lexicon(int lexid)
+{
+return T_tabl_avl_lexicon_size[lexid];
+}
+
+int max_code_lexicon(int lexid)
+{
+return T_avl_lexicon_max_code[lexid];
+}
+
+
diff --git a/slu/src/lia_liblex.h b/slu/src/lia_liblex.h
new file mode 100644
index 0000000000000000000000000000000000000000..aa041bcdd12adaca97858bb25d38fd702ce9f0b1
--- /dev/null
+++ b/slu/src/lia_liblex.h
@@ -0,0 +1,46 @@
+/*  Managing a lexicon with IDs  */
+
+
+/* load a lexicon and return a lexicon ID
+ *  - input = filename (char *)
+ *  - output = lexicon ID (int) */
+int load_lexicon(char *);
+int load_lexicon_inv(char *);
+
+/* delete a lexicon
+ *  - input = lexicon ID (int)
+ *  - output = void */
+void delete_lexicon(int);
+
+/* get a string from a code
+ *  - input = lexicon ID (int) + code (int)
+ *  - output = 0 if the code is missing
+ *             1 if the code is here
+ *             the address of the word string in (char **) */
+int code2word(int,int,char**);
+
+int code2index(int,int,int*);
+int index2code(int,int,int*);
+int index2word(int,int,char**);
+
+/* get a code from a string
+ *  - input = lexicon ID (int) + word string (char*)
+ *  - output = 0 if the word is not in the lexicon
+ *             1 if the word is in the lexicon
+ *             the code found in (int*) */
+int word2code(int,char*,int*);
+
+int addcode2word(int , char *, int );
+
+int size_lexicon(int);
+
+int max_code_lexicon(int);
+
+int new_lexicon();
+
+int add_word_lexicon(int , char *, int );
+
+void lexicon_sort_code(int);
+
+void print_lexicon_sort_code(int , int , FILE *);
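+/* Usage sketch (illustration only; the lexicon file name is a placeholder):
+ *
+ *   int code; char *word;
+ *   int lex = load_lexicon("dico_word.txt");
+ *   if (word2code(lex, "uno", &code)) printf("code=%d\n", code);
+ *   if (code2word(lex, code, &word)) printf("word=%s\n", word);
+ *   delete_lexicon(lex);
+ */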
+ 
diff --git a/slu/src/librocio_slu.cc b/slu/src/librocio_slu.cc
new file mode 100644
index 0000000000000000000000000000000000000000..048c59f4d92ec4b9ce8188365ba8d01c1f5fa528
--- /dev/null
+++ b/slu/src/librocio_slu.cc
@@ -0,0 +1,287 @@
+/*  SLU for Rocio XML  */
+/*  FRED 0215  */
+
+#include <string>
+#include <vector>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <strings.h>
+
+#include "librocio_slu.h"
+
+extern "C" {
+
+#include "lia_liblex.h"
+
+/*................................................................*/
+
+#define TailleLigne     80000
+
+#define True    1
+#define False   0
+
+void ERREUR(const char *ch1, const char *ch2)
+{
+    fprintf(stderr,"ERREUR : %s %s\n",ch1,ch2);
+    exit(0);
+}
+
+void ERREURd(const char *ch1, int i)
+{
+    fprintf(stderr,"ERREUR : %s %d\n",ch1,i);
+    exit(0);
+}
+
+/*................................................................*/
+
+#define MAX_FIELD	60000
+
+static const char *CHglouton="<joker>";
+static const char *CHepsilon="<epsilon>";
+
+#define IEPSILON	0
+#define IGLOU		1
+#define PENALEPSILON	50
+#define PENALGLOU	100
+#define WINLENGTH       30
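+
+/* Descriptive note: in build_fst_words below, every input word becomes an arc
+ * carrying its own code; words marked uncertain (prefixed "**") and newly
+ * hypothesized words also get an <epsilon> arc (deletion, cost PENALEPSILON)
+ * and a <joker> arc (substitution, cost PENALGLOU).  The resulting string FST
+ * is composed with the "clean" FST and the section model, and the shortest
+ * path yields the recognized actions; WINLENGTH bounds how many unmatched
+ * words are kept as history for the next call. */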
+
+typedef struct
+        {
+        int index;
+        char select;
+        } type_outword;
+
+/*................................................................*/
+
+
+fst::StdVectorFst *build_fst_words(slu_t* slu, char **words, int num_words, int lexidword, char *prevword, type_outword *t_outword, int *nbword)
+{
+    int i, nb,numstate,code,uncertain,deca;
+    fst::StdVectorFst *input;
+    char *pt;
+    input = new fst::StdVectorFst;
+    input->AddState();
+    input->SetStart(0);
+    numstate=nb=0;
+
+    /* add the previous words */
+    if (prevword)
+    {
+        for(pt=strtok(prevword," \t\n");pt;pt=strtok(NULL," \t\n"))
+        {
+            if (!strncmp(pt,"**",2)) { uncertain=True; deca=2; t_outword[nb].select=3; } else { uncertain=False; deca=0; t_outword[nb].select=1; }
+            if (word2code(lexidword,pt+deca,&code))
+            {
+                t_outword[nb++].index=code; if (nb==MAX_FIELD) ERREUR("cste MAX_FIELD too small","");
+                input->AddState();
+                input->AddArc(numstate,fst::StdArc(code,code,0,numstate+1));
+                if (uncertain)
+                {
+                    input->AddArc(numstate,fst::StdArc(code,IGLOU,PENALGLOU,numstate+1));
+                    input->AddArc(numstate,fst::StdArc(code,IEPSILON,PENALEPSILON,numstate+1));
+                }
+                numstate++;
+            }
+        }
+    } else if(slu->words->size() > 0) { // add support for memorized words
+        for(size_t i = 0; i < slu->words->size(); i++) {
+            char* pt = (*slu->words)[i];
+            if (!strncmp(pt,"**",2)) { uncertain=True; deca=2; t_outword[nb].select=3; } else { uncertain=False; deca=0; t_outword[nb].select=1; }
+            if (word2code(lexidword,pt+deca,&code))
+            {
+                t_outword[nb++].index=code; if (nb==MAX_FIELD) ERREUR("cste MAX_FIELD too small","");
+                input->AddState();
+                input->AddArc(numstate,fst::StdArc(code,code,0,numstate+1));
+                if (uncertain)
+                {
+                    input->AddArc(numstate,fst::StdArc(code,IGLOU,PENALGLOU,numstate+1));
+                    input->AddArc(numstate,fst::StdArc(code,IEPSILON,PENALEPSILON,numstate+1));
+                }
+                numstate++;
+            }
+        }
+    }
+    /* now the new words */
+    for(i = 0; i < num_words; i++) {
+        if (word2code(lexidword,words[i],&code))
+        {
+            t_outword[nb].select=3;
+            t_outword[nb++].index=code; if (nb==MAX_FIELD) ERREUR("cste MAX_FIELD too small","");
+            input->AddState();
+            input->AddArc(numstate,fst::StdArc(code,IEPSILON,PENALEPSILON,numstate+1));
+            input->AddArc(numstate,fst::StdArc(code,IGLOU,PENALGLOU,numstate+1));
+            input->AddArc(numstate,fst::StdArc(code,code,0,numstate+1));
+            numstate++;
+        }
+        //else fprintf(stderr,"Warning: [%s] is unknown, discarded\n",words[i]);
+    }
+    input->SetFinal(numstate,0);
+    *nbword=nb;
+    return input;
+}
+
+void run_process(slu_t* slu, char** words, int num_words, int prevn, char *prevword)
+{
+    fst::StdVectorFst *input,result1,result2,result3;
+    char *ch;
+    static int *tocc,i,j,nb,nbac,nbword;
+    type_outword *t_outword;
+
+    t_outword=(type_outword *)malloc(sizeof(type_outword)*MAX_FIELD);
+    tocc=(int*)malloc(sizeof(int)*(max_code_lexicon(slu->lexidaction)+1));
+    for(i=0;i<=max_code_lexicon(slu->lexidaction);i++) tocc[i]=0; /* codes go up to max_code inclusive */
+
+    input=build_fst_words(slu, words, num_words, slu->lexidword,prevword,t_outword,&nbword);
+    //fprintf(stderr, "size of input = %d\n", input->NumStates());
+    fst::ArcSort(input, fst::StdOLabelCompare());
+    fst::Compose(*input, *(slu->fstClean), &result1);
+    //fprintf(stderr, "size of result1 = %d\n", result1.NumStates());
+    fst::ArcSort(&result1, fst::StdOLabelCompare());
+    //fprintf(stderr, "size of model = %d\n", slu->fstModel->NumStates());
+    fst::Compose(result1,*(slu->fstModel),&result2);
+    //fprintf(stderr, "size of result2 = %d\n", result2.NumStates());
+    fst::ShortestPath(result2,&result3,1,false);
+    fst::TopSort(&result3);
+    delete input;
+
+    // reset actions
+    for(size_t i = 0; i < slu->actions->size(); i++) {
+        free((*slu->actions)[i]);
+    }
+    slu->actions->clear();
+
+    // process nbest
+    fst::StdVectorFst::StateId start = result3.Start();
+    if ((int)start>=0)
+    {
+        for(fst::ArcIterator<fst::StdVectorFst> aiter(result3, start);!aiter.Done(); aiter.Next())
+        {
+            const fst::StdArc arc = aiter.Value(); // this arc is an epsilon arc leading to the next path
+            nbword=0;
+            if (arc.ilabel>0)
+            {
+                t_outword[nbword].index=arc.ilabel;
+                if ((arc.olabel==0)&&(arc.weight!=0)) t_outword[nbword].select=0; else
+                    if (arc.weight==0) t_outword[nbword].select=1; else t_outword[nbword].select=2;
+                nbword++;
+            }
+            if (arc.olabel>=2)
+            {
+                nb=nbac=1;
+                if (!code2word(slu->lexidaction,arc.olabel,&ch)) ERREURd("unknown action code:",arc.olabel);
+                tocc[arc.olabel]++;
+                if (nbac>prevn) {
+                    slu->actions->push_back(strdup(ch));
+                    //printf("%s[%d]=>%d\n",ch,tocc[arc.olabel],nbac);
+                }
+            }
+            int64 state = arc.nextstate;
+            while(result3.Final(state) == fst::StdArc::Weight::Zero())
+            {
+                const fst::StdArc nextArc = fst::ArcIterator<fst::StdVectorFst>(result3, state).Value();
+                nb++;
+                if (nextArc.ilabel>0)
+                {
+                    t_outword[nbword].index=nextArc.ilabel;
+                    if ((nextArc.olabel==0)&&(nextArc.weight!=0)) t_outword[nbword].select=0; else
+                        if (nextArc.weight==0) t_outword[nbword].select=1; else t_outword[nbword].select=2;
+                    nbword++; if (nbword==MAX_FIELD) ERREUR("cste MAX_FIELD too small","");
+                }
+                if (nextArc.olabel>=2)
+                {
+                    nbac=nb;
+                    if (!code2word(slu->lexidaction,nextArc.olabel,&ch)) ERREURd("unknown action code:",nextArc.olabel);
+                    tocc[nextArc.olabel]++;
+                    if (nbac>prevn) {
+                        slu->actions->push_back(strdup(ch));
+                        //printf("%s[%d]=>%d\n",ch,tocc[nextArc.olabel],nbac);
+                    }
+                }
+                state = nextArc.nextstate;
+            }
+        }
+    }
+
+
+    //printf("STRING:");
+    for(size_t i = 0; i < slu->words->size(); i++) free((*slu->words)[i]);
+    slu->words->clear();
+
+    for(j=nbword-1;(j>0)&&(t_outword[j].select==0);j--) t_outword[j].select=3;
+    for(i=0;i<=j;i++) if (t_outword[i].select!=0)
+    {
+        if (!code2word(slu->lexidword,t_outword[i].index,&ch)) ERREURd("unknown word code:",t_outword[i].index);
+        if (t_outword[i].select==2) {
+            //printf(" %s",CHglouton); 
+            slu->words->push_back(strdup(CHglouton));
+        } else if (t_outword[i].select==3) {
+            //printf(" **%s",ch); 
+            char buffer[strlen(ch) + 3];
+            buffer[0] = buffer[1] = '*';
+            strcpy(buffer + 2, ch);
+            slu->words->push_back(strdup(buffer));
+        } else {
+            //printf(" %s",ch);
+            slu->words->push_back(strdup(ch));
+        }
+    }
+    /* now we limit to a window of WINLENGTH unmatched words */
+    if (nbword-WINLENGTH>j) j=nbword-WINLENGTH; else j++;
+    for(;j<nbword;j++)
+    {
+        if (!code2word(slu->lexidword,t_outword[j].index,&ch)) ERREURd("unknown word code:",t_outword[j].index);
+        char buffer[strlen(ch) + 3];
+        buffer[0] = buffer[1] = '*';
+        strcpy(buffer + 2, ch);
+        slu->words->push_back(strdup(buffer));
+        //printf(" **%s",ch);
+    }
+    //printf("\n");
+    free(tocc); free(t_outword);
+}
+
+/*................................................................*/
+
+slu_t* init_slu(char* chfileword, char* chfileaction, char* chfilemodel, char* chfileclean) {
+    slu_t* slu = (slu_t*) malloc(sizeof(slu_t));
+
+    slu->lexidword = load_lexicon(chfileword);
+    //fprintf(stderr, "lexidword = %d\n", slu->lexidword);
+    slu->lexidaction = load_lexicon(chfileaction);
+    //fprintf(stderr, "lexidaction = %d\n", slu->lexidaction);
+    slu->fstModel = fst::StdVectorFst::Read(chfilemodel);
+    if (!slu->fstModel) ERREUR("can't read fst model:",chfilemodel);
+    slu->fstClean = fst::StdVectorFst::Read(chfileclean);
+    if (!slu->fstClean) ERREUR("can't read fst clean:",chfileclean);
+    slu->actions = new std::vector<char*>();
+    slu->words = new std::vector<char*>();
+    return slu;
+}
+
+void free_slu(slu_t* slu) {
+    delete slu->fstClean;
+    delete slu->fstModel;
+    for(size_t i = 0; i < slu->actions->size(); i++) free((*slu->actions)[i]);
+    delete slu->actions;
+    for(size_t i = 0; i < slu->words->size(); i++) free((*slu->words)[i]);
+    delete slu->words;
+    free(slu);
+}
+
+int num_actions(slu_t* slu) {
+    return slu->actions->size();
+}
+
+char* get_action(slu_t* slu, size_t index) {
+    if(index < slu->actions->size()) return (*slu->actions)[index]; /* index is size_t, no need to test >= 0 */
+    return NULL;
+}
+
+int run_slu(slu_t* slu, char** words, int num_words, int prevn, char *prevword) {
+    //fprintf(stderr, "before process\n");
+    run_process(slu, words, num_words, prevn,prevword);
+    //fprintf(stderr, "after process\n");
+    return num_actions(slu);
+}
+
+}
+ 
diff --git a/slu/src/librocio_slu.h b/slu/src/librocio_slu.h
new file mode 100644
index 0000000000000000000000000000000000000000..39ece4f6515e8ef248d7f083a7126eb4da5684d6
--- /dev/null
+++ b/slu/src/librocio_slu.h
@@ -0,0 +1,31 @@
+#ifndef __ROCIO_SLU__
+#define __ROCIO_SLU__
+
+#include <vector>
+#include <fst/fstlib.h>
+
+extern "C" {
+
+typedef struct {
+    int lexidword;
+    int lexidaction;
+    fst::StdVectorFst* fstModel;
+    fst::StdVectorFst* fstClean;
+    std::vector<char*>* actions;
+    std::vector<char*>* words;
+} slu_t;
+
+slu_t* init_slu(char* chfileword, char* chfileaction, char* chfilemodel, char* chfileclean);
+
+int run_slu(slu_t* slu, char** words, int num_words, int prevn, char *prevword);
+
+int num_actions(slu_t* slu);
+
+char* get_action(slu_t* slu, size_t index);
+
+void free_slu(slu_t* slu);
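+
+/* Minimal usage sketch (illustration only; file names are placeholders,
+ * rocio_slu.cc is the actual command-line driver):
+ *
+ *   slu_t* slu = init_slu("dico_word.txt", "dico_action.txt",
+ *                         "section1.fst", "clean_tail.fst");
+ *   char* words[] = { (char*)"uno" };
+ *   int n = run_slu(slu, words, 1, 0, NULL);
+ *   for (int i = 0; i < n; i++) printf("%s\n", get_action(slu, i));
+ *   free_slu(slu);
+ */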
+
+void ERREUR(const char *ch1, const char *ch2);
+}
+
+#endif
diff --git a/slu/src/process_xml_for_slu.c b/slu/src/process_xml_for_slu.c
new file mode 100644
index 0000000000000000000000000000000000000000..a1ae6dc3f2b135ca54440061e377161e8135b182
--- /dev/null
+++ b/slu/src/process_xml_for_slu.c
@@ -0,0 +1,298 @@
+/*  Process Rocio XML for SLU  */
+/*  FRED 0215  */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <strings.h>
+
+/*................................................................*/
+
+#define TailleLigne     8000
+
+#define True    1
+#define False   0
+
+void ERREUR(char *ch1,char *ch2)
+{
+fprintf(stderr,"ERREUR : %s %s\n",ch1,ch2);
+exit(0);
+}
+
+void ERREURd(char *ch1, int i)
+{
+fprintf(stderr,"ERREUR : %s %d\n",ch1,i);
+exit(0);
+}
+
+/*................................................................*/
+
+/* FST DICO */
+
+#define MAX_SIZE_DICO	1000
+#define IGLOU		1
+#define PENALGLOU	100
+#define PENALEND	100
+#define STARTNEW	2
+
+char *CHglouton="<joker>";
+char *CHepsilon="<epsilon>";
+
+char *T_dico_action[MAX_SIZE_DICO],*T_dico_word[MAX_SIZE_DICO];
+int NbAction=STARTNEW,NbWord=STARTNEW;
+
+int from_action_to_index(char *ch)
+{
+int i;
+for(i=STARTNEW;(i<NbAction)&&(strcmp(ch,T_dico_action[i]));i++);
+if (i==NbAction) T_dico_action[NbAction++]=strdup(ch);
+if (NbAction==MAX_SIZE_DICO) ERREUR("cste MAX_SIZE_DICO too small","");
+return i;
+}
+
+int from_word_to_index(char *ch)
+{
+int i;
+for(i=STARTNEW;(i<NbWord)&&(strcmp(ch,T_dico_word[i]));i++);
+if (i==NbWord) T_dico_word[NbWord++]=strdup(ch);
+if (NbWord==MAX_SIZE_DICO) ERREUR("cste MAX_SIZE_DICO too small","");
+return i;
+}
+
+/*................................................................*/
+
+/* format:
+<homeostasis version="25-11-2014">
+    <liste_section sequences="1,2,3,4,5,6,7,8,9" ordre="variable" repetition="oui" action="exclusive">
+    <section id="1" action="start_scene1">
+        <sequence ordre="strict" repetition="non" action="" lang="eng">
+           <keyword action="start_scene1" lang="esp"> uno </keyword>
+        </sequence>
+        </section>
+        <section id="2" action="close_scene1/start_scene2">
+            <sequence ordre="strict" repetition="non" action="" lang="esp">
+                <keyword action="open_scene2" lang="esp"> dos </keyword>
+                <keyword action="open_2A" lang="eng"> open system </keyword>
+            </sequence>
+          <sequence ordre="strict" repetition="oui" action="" lang="eng">
+            <keyword action="start_system_voice" lang="eng"> tell me </keyword>
+            <keyword action="open_2C2" lang="eng"> next </keyword>
+            <keyword action="open_2D" lang="eng"> install the new version of me </keyword>
+            <keyword action="#end" lang="eng"> give me my data </keyword>
+        </sequence>
+    </section>
+*/
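+
+/* Usage sketch (illustration only; the XML file name is a placeholder):
+ *
+ *   ./process_xml_for_slu -prefix automate/homeostasis_25nov < script.xml
+ *
+ * For each <section id="N"> this writes <prefix>_sectionN.txt (the section FST
+ * in text form) and <prefix>_sectionN_text.txt, plus <prefix>_clean_tail.txt,
+ * <prefix>_dico_action.txt and <prefix>_dico_word.txt.  The text FSTs are
+ * presumably compiled with OpenFst's fstcompile, using the dico files as
+ * symbol tables (see try.csh and produce_action_string_fst.c for how the
+ * compiled automata are used). */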
+
+#define STRICT		0
+#define VARIABLE	1
+
+char *get_field(char *ch, char *attribut, char *chfield)
+{
+int i,j;
+chfield[0]='\0';
+for(i=0;(ch[i])&&((ch[i]!=' ')||(strncmp(ch+i+1,attribut,strlen(attribut)))||(ch[i+1+strlen(attribut)]!='='));i++);
+if (ch[i])
+ {
+ for(j=0,i=i+1+strlen(attribut)+2;(ch[i])&&(ch[i]!='"');i++,j++) chfield[j]=ch[i];
+ if (ch[i]!='"') ERREUR("bad format1:",ch);
+ chfield[j]='\0';
+ }
+return chfield;
+}
+
+char *get_content(char *ch, char *chcontent)
+{
+int i,j;
+chcontent[0]='\0';
+for(i=0;(ch[i])&&(ch[i]!='>');i++);
+if (!ch[i]) ERREUR("bad format2:",ch);
+for(++i;(ch[i])&&((ch[i]==' ')||(ch[i]=='\t'));i++);
+for(j=0;(ch[i])&&(ch[i]!='<');i++,j++) chcontent[j]=ch[i];
+if (!ch[i]) ERREUR("bad format3:",ch);
+chcontent[j]='\0';
+for(--j;(j>0)&&((chcontent[j]==' ')||(chcontent[j]=='\t'));j--) chcontent[j]='\0';
+return chcontent;
+}
+
+void remove_space(char *ch)
+{
+int i;
+for(i=0;ch[i];i++) if (ch[i]==' ') ch[i]='_';
+}
+
+/*................................................................*/
+
+#define MAX_FIELD	60
+#define MAX_END_STATE	1000
+
+int main(int argc, char **argv)
+{
+int nbsection,i,j,nb,ordre,repetition,nbseq,actionsection,actionsequence,currentstate,statedebseq,action,statetext,findend,t_end[MAX_END_STATE],nbend;
+FILE *file,*filetext;
+char *pt,ch[TailleLigne],*t_field[MAX_FIELD],*chprefix,chname[TailleLigne],chfield[TailleLigne],chcontent[TailleLigne];
+
+chprefix=NULL;
+if (argc>1)
+ for(nb=1;nb<argc;nb++)
+  if (!strcmp(argv[nb],"-prefix"))
+   {
+   if (nb+1==argc) ERREUR("must have a value after argument;",argv[nb]);
+   //if (!(file=fopen(argv[++nb],"rt"))) ERREUR("can't open:",argv[nb]);
+   chprefix=argv[++nb];
+   }
+  else
+  if (!strcmp(argv[nb],"-h"))
+   {
+   fprintf(stderr,"Syntax: %s [-h] -prefix <string>\n",argv[0]);
+   exit(0);
+   }
+  else ERREUR("unknown option:",argv[nb]);
+
+if (!chprefix) ERREUR("bad syntax, check '-h'","");
+
+ordre=STRICT; repetition=False; file=filetext=NULL; actionsection=0; findend=False; nbend=0; nbseq=0;
+for(nbsection=0;fgets(ch,TailleLigne,stdin);)
+ {
+ if (strstr(ch,"<section"))
+  {
+  if (filetext) { fprintf(filetext,"%d\n",statetext); fclose(filetext); }
+  if ((file)&&(nbseq>0))
+   {
+   if (!findend)
+    {
+    if (ordre==VARIABLE) { fprintf(stderr,"ERREUR: no exit action on a variable section, in section %d and sequence %d\n",nbsection,nbseq); exit(0); }
+    t_end[nbend++]=currentstate;
+    }
+   if (nbend>0) /* emit the end-of-section actions */
+    {
+    sprintf(chcontent,"action(%d,%d,\"#ENDSEQUENCE(%d)\",\"\")",nbsection,nbseq,nbseq); actionsequence=from_action_to_index(chcontent);
+    sprintf(chcontent,"action(%d,%d,\"#ENDSECTION(%d)\",\"\")",nbsection,nbseq,nbsection); actionsection=from_action_to_index(chcontent);
+    for(i=0;i<nbend;i++) fprintf(file,"%d\t%d\t<epsilon>\t%d\t0\n",t_end[i],currentstate+1,actionsequence);
+    fprintf(file,"%d\t%d\t<epsilon>\t%d\t0\n",currentstate+1,currentstate+2,actionsection);
+    fprintf(file,"%d\n",currentstate+2);
+    currentstate+=2;
+    nbend=0;
+    }
+   fclose(file); file=NULL;
+   }
+  nbsection++; statetext=0; nbseq=0; currentstate=0;
+  get_field(ch,"id",chfield);
+  sprintf(ch,"%s_section%s.txt",chprefix,chfield);
+  if (!(file=fopen(ch,"wt"))) ERREUR("can't write in:",ch);
+  sprintf(ch,"%s_section%s_text.txt",chprefix,chfield);
+  if (!(filetext=fopen(ch,"wt"))) ERREUR("can't write in",ch);
+  get_field(ch,"action",chfield);
+  if (chfield[0]) { sprintf(ch,"action(\"%s\",\"\")",chfield); actionsection=from_action_to_index(ch); } else actionsection=0;
+  }
+ else
+ if (strstr(ch,"<sequence")) // <sequence ordre="strict" repetition="oui" action="" lang="eng">
+  {
+  if ((nbseq!=0)&&(!findend))
+   {
+   if (ordre==VARIABLE) { fprintf(stderr,"ERREUR: no exit action on a variable section, in section %d and sequence %d\n",nbsection,nbseq); exit(0); }
+   t_end[nbend++]=currentstate;
+   }
+  if (nbend>0)
+   {
+   sprintf(chcontent,"action(%d,%d,\"#ENDSEQUENCE(%d)\",\"\")",nbsection,nbseq,nbseq); actionsequence=from_action_to_index(chcontent);
+   for (i=0;i<nbend;i++) fprintf(file,"%d\t%d\t<epsilon>\t%d\t0\n",t_end[i],currentstate+1,actionsequence);
+   fprintf(file,"%d\n",currentstate+1);
+   currentstate++;
+   nbend=0;
+   }
+  statedebseq=currentstate;
+  findend=False;
+  nbseq++;
+  get_field(ch,"ordre",chfield);
+  if (!strcmp(chfield,"strict")) ordre=STRICT; else
+  if (!strcmp(chfield,"variable")) ordre=VARIABLE; else ERREUR("wrong value for attribut ordre:",ch);
+  if (!strcmp(get_field(ch,"repetition",chfield),"oui")) repetition=True; else repetition=False;
+  sprintf(ch,"%s_sequence%d.txt",chname,nbseq);
+  if (actionsection!=0) { fprintf(file,"%d\t%d\t<epsilon>\t%d\t0\n",currentstate,currentstate+1,actionsection); currentstate++; }
+  get_field(ch,"action",chfield);
+  if (chfield[0]) { fprintf(file,"%d\t%d\t<epsilon>\t%d\t0\n",currentstate,currentstate+1,from_action_to_index(chfield)); currentstate++; }
+  }
+ else
+ if (strstr(ch,"<keyword"))
+  {
+  // <keyword action="open_2D" lang="eng"> install the new version of me </keyword>
+  // <keyword action="#end" lang="eng"> give me my data </keyword>
+  get_field(ch,"action",chfield);
+  if (chfield[0])
+   {
+   get_content(ch,chcontent);
+   sprintf(ch,"action(%d,%d,\"%s\",\"%s\")",nbsection,nbseq,chfield,chcontent);
+   remove_space(ch);
+   action=from_action_to_index(ch);
+   for(i=0,pt=strtok(chcontent," ");pt;pt=strtok(NULL," "),i++)
+    {
+    if ((ordre==STRICT)||(i>0)) fprintf(file,"%d\t%d\t%s\t%d\t0\n",currentstate+i,currentstate+1+i,pt,i==0?action:0); 
+    else fprintf(file,"%d\t%d\t%s\t%d\t0\n",statedebseq,currentstate+1+i,pt,i==0?action:0);
+    from_word_to_index(pt);
+    fprintf(filetext,"%d\t%d\t%s\n",statetext,statetext+1,pt);
+    if ((0)&&(repetition)) { for (j=0;j<4;j++) fprintf(filetext,"%d\t%d\t%s\n",statetext,statetext+1,pt); }
+    statetext++;
+    }
+   if (ordre==STRICT) { if (repetition) fprintf(file,"%d\t%d\t<epsilon>\t0\t0\n",currentstate+i,currentstate); }
+   else fprintf(file,"%d\t%d\t<epsilon>\t0\t0\n",currentstate+i,statedebseq);
+   /* now we can end at any keyword, but with a penalty if it is not a valid end */
+   if (!strcmp(chfield,"#end")) { findend=True; t_end[nbend++]=currentstate+i; if (nbend==MAX_END_STATE) ERREUR("cste MAX_END_STATE too small","");  }
+   else fprintf(file,"%d\t%d\n",currentstate+i,PENALEND);
+   // we add glouton transition if it's a strict section
+   if (ordre==STRICT) fprintf(file,"%d\t%d\t%s\t%d\t%d\n",currentstate,currentstate+i,CHglouton,action,PENALGLOU);
+   currentstate+=i;
+   }
+  }
+ }
+
+if ((file)&&(nbseq>0))
+ {
+ if (!findend)
+  {
+  if (ordre==VARIABLE) { fprintf(stderr,"ERREUR: no exit action on a variable section, in section %d and sequence %d\n",nbsection,nbseq); exit(0); }
+  t_end[nbend++]=currentstate;
+  }
+ if (nbend>0) /* emit the end-of-section actions */
+  {
+  sprintf(chcontent,"action(%d,%d,\"#ENDSEQUENCE(%d)\",\"\")",nbsection,nbseq,nbseq); actionsequence=from_action_to_index(chcontent);
+  sprintf(chcontent,"action(%d,%d,\"#ENDSECTION(%d)\",\"\")",nbsection,nbseq,nbsection); actionsection=from_action_to_index(chcontent);
+  for(i=0;i<nbend;i++) fprintf(file,"%d\t%d\t<epsilon>\t%d\t0\n",t_end[i],currentstate+1,actionsequence);
+  fprintf(file,"%d\t%d\t<epsilon>\t%d\t0\n",currentstate+1,currentstate+2,actionsection);
+  fprintf(file,"%d\n",currentstate+2);
+  }
+ }
+
+if (file) fclose(file);
+if (filetext) { fprintf(filetext,"%d\n",statetext); fclose(filetext); }
+
+// write tail GLOUTON eraser
+sprintf(ch,"%s_clean_tail.txt",chprefix);
+if (!(file=fopen(ch,"wt"))) ERREUR("can't write in:",ch);
+for(i=STARTNEW;i<NbWord;i++) fprintf(file,"0\t1\t%s\t%s\n",T_dico_word[i],T_dico_word[i]);
+fprintf(file,"1\t1\t%s\t%s\n",CHglouton,CHglouton);
+for(i=STARTNEW;i<NbWord;i++) fprintf(file,"1\t1\t%s\t%s\n",T_dico_word[i],T_dico_word[i]);
+fprintf(file,"1\t2\t%s\t%s\n",CHglouton,CHepsilon);
+fprintf(file,"1\n");
+fprintf(file,"2\t3\t%s\t%s\n",CHepsilon,CHglouton);
+fprintf(file,"2\t2\t%s\t%s\n",CHglouton,CHepsilon);
+fprintf(file,"2\n");
+for(i=STARTNEW;i<NbWord;i++) fprintf(file,"3\t1\t%s\t%s\n",T_dico_word[i],T_dico_word[i]);
+fclose(file);
+
+// write dico action
+sprintf(ch,"%s_dico_action.txt",chprefix);
+if (!(file=fopen(ch,"wt"))) ERREUR("can't write in:",ch);
+fprintf(file,"<epsilon> 0\n"); fprintf(file,"%s 1\n",CHglouton);
+for(i=STARTNEW;i<NbAction;i++) fprintf(file,"%s %d\n",T_dico_action[i],i);
+fclose(file);
+
+// write dico word
+sprintf(ch,"%s_dico_word.txt",chprefix);
+if (!(file=fopen(ch,"wt"))) ERREUR("can't write in:",ch);
+fprintf(file,"<epsilon> 0\n"); fprintf(file,"%s 1\n",CHglouton);
+for(i=STARTNEW;i<NbWord;i++) fprintf(file,"%s %d\n",T_dico_word[i],i);
+fclose(file);
+
+exit(0);
+}
+ 
diff --git a/slu/src/produce_action_string_fst b/slu/src/produce_action_string_fst
new file mode 100755
index 0000000000000000000000000000000000000000..6f21d2096e00e9f9d80d7e8e9ef22cc98b993e32
Binary files /dev/null and b/slu/src/produce_action_string_fst differ
diff --git a/slu/src/produce_action_string_fst.c b/slu/src/produce_action_string_fst.c
new file mode 100644
index 0000000000000000000000000000000000000000..3d9ea3bdc43d9d4dae19e033fa736d73d3bf2ce5
--- /dev/null
+++ b/slu/src/produce_action_string_fst.c
@@ -0,0 +1,148 @@
+/*  Produce the simplest string producing a given action  */
+/*  FRED 0215  */
+
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <strings.h>
+
+/*................................................................*/
+
+#define TailleLigne     8000
+
+#define True    1
+#define False   0
+
+void ERREUR(char *ch1,char *ch2)
+{
+fprintf(stderr,"ERREUR : %s %s\n",ch1,ch2);
+exit(0);
+}
+
+void ERREURd(char *ch1, int i)
+{
+fprintf(stderr,"ERREUR : %s %d\n",ch1,i);
+exit(0);
+}
+
+/*................................................................*/
+
+/* format:
+action(1,1,"start_scene1","uno") 2
+action(1,1,"#ENDSEQUENCE(1)","") 3
+action(1,1,"#ENDSECTION(1)","") 4
+action(2,1,"open_scene2","dos") 5
+action(2,1,"open_2A","open_system") 6
+action(2,1,"#ENDSEQUENCE(1)","") 7
+action(2,2,"start_system_voice","tell_me") 8
+action(2,2,"open_2B","open_technical_characteristics") 9
+action(2,2,"open_2B1","read") 10
+action(2,2,"open_2B2","next") 11
+action(2,2,"open_2B3","yes") 12
+action(2,2,"open_2B4","read") 13
+action(2,2,"open_2B5","download") 14
+action(2,2,"open_2C","open_the_terms_and_conditions_of_use_of_body_x_epsilon_system_three_point_zero") 15
+action(2,2,"open_2C1","accept_terms_and_conditions_of_use") 16
+action(2,2,"open_2C2","next") 17
+action(2,2,"open_2D","install_the_new_version_of_me") 18
+action(2,2,"#end","give_me_my_data") 19
+action(2,2,"#ENDSEQUENCE(2)","") 20
+action(2,2,"#ENDSECTION(2)","") 21
+*/
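+
+/* Usage sketch (illustration only; the dictionary path is a placeholder):
+ *
+ *   ./produce_action_string_fst -action automate/homeostasis_25nov_dico_action.txt
+ *
+ * For every action of sections 0..9 it composes the section FST (hard-coded
+ * below as automate/homeostasis_25nov_section<N>.fst) with an acceptor that
+ * requires that action, and prints one line per action:
+ * "<action>\t<shortest word string producing it>". */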
+
+#define MAX_ACTION	2000
+
+typedef struct
+	{
+	char *ch;
+	int nbsec,code;
+	} type_action;
+
+type_action T_action[MAX_ACTION];
+
+void load_action(char *chfile)
+{
+FILE *file;
+char ch[TailleLigne],*chcode;
+int code,i,nb,nbsec;
+if (!(file=fopen(chfile,"rt"))) ERREUR("can't open:",chfile);
+for(nb=0;fgets(ch,TailleLigne,file);) if (strstr(ch,"action("))
+ {
+ if (nb==MAX_ACTION) ERREUR("cste MAX_ACTION too small","");
+ chcode=strtok(ch," \n"); if (chcode) chcode=strtok(NULL," \n"); if (!chcode) ERREUR("bad format:",ch);
+ if (sscanf(ch,"action(%d,",&nbsec)!=1) ERREUR("bad format:",ch);
+ if (sscanf(chcode,"%d",&code)!=1) ERREUR("bad format:",chcode);
+ T_action[nb].ch=strdup(ch);
+ T_action[nb].nbsec=nbsec;
+ T_action[nb].code=code;
+ nb++;
+ }
+if (nb==MAX_ACTION) ERREUR("cste MAX_ACTION too small","");
+T_action[nb].ch=NULL;
+fclose(file);
+}
+
+void print_fst_section(FILE *file, int nbsec, int nbac)
+{
+int i;
+for(i=0;T_action[i].ch;i++) if ((T_action[i].nbsec==nbsec)&&(i!=nbac))
+ fprintf(file,"0\t0\t%d\n",T_action[i].code);
+fprintf(file,"0\t1\t%d\n",T_action[nbac].code);
+fprintf(file,"1\n");
+}
+
+void process_section(int nbsec)
+{
+int i;
+static char ch[TailleLigne];
+FILE *file;
+for(i=0;T_action[i].ch;i++) if (T_action[i].nbsec==nbsec)
+ {
+ sprintf(ch,"temp.tmp");
+ if (!(file=fopen(ch,"wt"))) ERREUR("can't write in:",ch);
+ print_fst_section(file,nbsec,i);
+ fclose(file);
+ sprintf(ch,"fstcompile --acceptor temp.tmp | fstarcsort > temp.fst");
+ system(ch);
+ sprintf(ch,"fstarcsort automate/homeostasis_25nov_section%d.fst | fstcompose - temp.fst | fstshortestpath | fsttopsort | fstproject | fstrmepsilon | fstprint --isymbols=automate/homeostasis_25nov_dico_word.txt | grep '	' | cut -f3 > temp.fst.txt",nbsec);
+ system(ch);
+ if (!(file=fopen("temp.fst.txt","rt"))) ERREUR("can't read:","temp.fst.txt");
+ printf("%s\t",T_action[i].ch);
+ if (fgets(ch,TailleLigne,file))
+  {
+  strtok(ch,"\n"); printf("%s",ch);
+  while ((!feof(file))&&(fgets(ch,TailleLigne,file))) { strtok(ch,"\n"); printf(" %s",ch); }
+  printf("\n");
+  }
+ fclose(file);
+ }
+}
+
+int main(int argc, char **argv)
+{
+int nb;
+char ch[TailleLigne],*chaction;
+
+chaction=NULL;
+if (argc>1)
+ for(nb=1;nb<argc;nb++)
+  if (!strcmp(argv[nb],"-action"))
+   {
+   if (nb+1==argc) ERREUR("must have a value after argument;",argv[nb]);
+   chaction=argv[++nb];
+   }
+  else
+  if (!strcmp(argv[nb],"-h"))
+   {
+   fprintf(stderr,"Syntax: %s [-h] \n",argv[0]);
+   exit(0);
+   }
+  else ERREUR("unknown option:",argv[nb]);
+if (!chaction) ERREUR("bad syntax, check '-h'","");
+
+load_action(chaction);
+for(nb=0;nb<10;nb++) process_section(nb);
+
+exit(0);
+}
+ 
diff --git a/slu/src/rocio_slu.cc b/slu/src/rocio_slu.cc
new file mode 100644
index 0000000000000000000000000000000000000000..70b90eee1f0d4033e70d3834ad0d7053e2dd2701
--- /dev/null
+++ b/slu/src/rocio_slu.cc
@@ -0,0 +1,58 @@
+/*  SLU for Rocio XML  */
+/*  FRED 0215  */
+
+#include "librocio_slu.h"
+
+#define TailleLigne     80000
+#define MAX_FIELD	60000
+
+int main(int argc, char **argv)
+{
+    int nb,prevn;
+    char ch[TailleLigne],*chfileword,*chfileaction,*chfilemodel,*chfileclean,*prevword;
+
+    prevn=0; prevword=NULL;
+    chfileword=chfileaction=chfilemodel=chfileclean=NULL;
+    if (argc>1)
+        for(nb=1;nb<argc;nb++)
+            if (!strcmp(argv[nb],"-word")) { if (nb+1==argc) ERREUR("must have a value after argument;",argv[nb]); chfileword=argv[++nb]; }
+            else
+                if (!strcmp(argv[nb],"-action")) { if (nb+1==argc) ERREUR("must have a value after argument;",argv[nb]); chfileaction=argv[++nb]; }
+                else
+                    if (!strcmp(argv[nb],"-fstmodel")) { if (nb+1==argc) ERREUR("must have a value after argument;",argv[nb]); chfilemodel=argv[++nb]; }
+                    else
+                        if (!strcmp(argv[nb],"-fstclean")) { if (nb+1==argc) ERREUR("must have a value after argument;",argv[nb]); chfileclean=argv[++nb]; }
+                        else
+                            if (!strcmp(argv[nb],"-prevn")) { if (nb+1==argc) ERREUR("must have a value after argument;",argv[nb]); if (sscanf(argv[++nb],"%d",&prevn)!=1) ERREUR("bad value:",argv[nb]); }
+                            else
+                                if (!strcmp(argv[nb],"-prevword")) { if (nb+1==argc) ERREUR("must have a value after argument;",argv[nb]); prevword=argv[++nb]; }
+                                else
+                                    if (!strcmp(argv[nb],"-h"))
+                                      {
+                                      fprintf(stderr,"Syntax: %s [-h] -word <file> -action <file> -fstmodel <file> -fstclean <file> [-prevn <int>][-prevword <string>]\n",argv[0]);
+                                      exit(0);
+                                      }
+                                    else ERREUR("unknown option:",argv[nb]);
+    if ((!chfileword)||(!chfileaction)||(!chfilemodel)||(!chfileclean)) ERREUR("bad syntax, check '-h'","");
+
+    // init
+    slu_t* slu = init_slu(chfileword, chfileaction, chfilemodel, chfileclean);
+
+    // read words from stdin
+    char* words[MAX_FIELD];
+    int num_words = 0;
+
+    while (fgets(ch,TailleLigne,stdin))
+    {
+        /* duplicate each token: ch is reused for every input line */
+        char *tok;
+        for (tok=strtok(ch," \t\n\r"); tok; tok=strtok(NULL," \t\n\r"))
+        {
+            if (num_words==MAX_FIELD) ERREUR("cste MAX_FIELD too small","");
+            words[num_words++]=strdup(tok);
+        }
+    }
+
+    fprintf(stderr, "%d words read from stdin\n", num_words);
+
+    // run them through system
+    run_slu(slu, words, num_words, prevn, prevword);
+    free_slu(slu);
+    exit(0);
+}
+ 
diff --git a/slu/src/try.csh b/slu/src/try.csh
new file mode 100644
index 0000000000000000000000000000000000000000..9819b6f4aac9edd1751d53666546c66b3fb0c951
--- /dev/null
+++ b/slu/src/try.csh
@@ -0,0 +1,26 @@
+#!/bin/csh
+
+setenv LD_LIBRARY_PATH /storage/raid1/homedirs/frederic.bechet/bolt/errseg_tagger/src/crfsuite-0.12/lib/crf/.libs:/storage/raid1/homedirs/frederic.bechet/bolt/errseg_tagger/src/liblbfgs-1.10/lib/.libs:/storage/raid1/homedirs/frederic.bechet/bolt/errseg_tagger/src/crfsuite-0.12/lib/cqdb/.libs:/storage/raid1/homedirs/frederic.bechet/bolt/errseg_tagger/src/openfst-1.3.3/src/lib/.libs:./src_2015-02-22
+
+if ( $#argv != 2 ) then
+ echo "Syntax: 1=fichier texte  2=numero de la section  / exemple: csh try.csh homeostasis_25nov.asr/sect6.hyp 6"
+ exit
+endif
+
+set DIR_SRC=./src_2015-02-22
+set DIR_DATA=./automate_2015-02-22
+set DICOWORD=$DIR_DATA/homeostasis_25nov_dico_word.txt
+set DICOACTION=$DIR_DATA/homeostasis_25nov_dico_action.txt
+
+set FSTMODEL=$DIR_DATA/homeostasis_25nov_section$2.fst
+set FSTCLEAN=$DIR_DATA/homeostasis_25nov_clean_tail.fst
+
+set OPT = "DEBUT"
+foreach i (`cat $1`)
+ echo "NEW_WORD=($i)  -  HISTORY=($OPT)"
+ echo "$i" | $DIR_SRC/rocio_slu -word $DICOWORD -action $DICOACTION -fstmodel $FSTMODEL -fstclean $FSTCLEAN -prevword "$OPT" | tee toto.tmp
+ set OPT = `cat toto.tmp | grep STRING | cut -d':' -f2`
+ grep -v STRING toto.tmp
+ echo '****************************************************************'
+end
+  
diff --git a/xmlview.py b/xmlview.py
index d6ceee07a32fd8061a2ba212f85e69f4ea690c4d..8fdae41ab03bef6a53a74cba885216810f1e9db9 100644
--- a/xmlview.py
+++ b/xmlview.py
@@ -102,8 +102,12 @@ class Keyword(Gtk.Label):
         self.get_style_context().add_class('keyword')
         self.connect('activate-link', self.link_clicked)
 
+    def highlight(self):
+        self.get_style_context().remove_class('keyword')
+        self.get_style_context().add_class('keyword-highlighted')
+
     def link_clicked(self, widget, uri):
-        actions.perform_action(actions.Action(uri, keyword=widget))
+        actions.perform_action(actions.Action(uri, keyword=widget), False)
         return True
 
 class Text(Gtk.Label):
@@ -130,6 +134,15 @@ class XmlView(Gtk.ScrolledWindow):
             section.add_listener(self.section_clicked)
         self.set_section(0)
 
+        self.keywords = []
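+        # Keywords are collected in document order so that highlight() can
+        # walk forward from last_highlighted and mark the next keyword whose
+        # action matches a recognized action.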
+        for section in self.sections:
+            for sequence in section.sequences:
+                for line in sequence.lines:
+                    for element in line.elements:
+                        if hasattr(element, 'action'):
+                            self.keywords.append(element)
+        self.last_highlighted = -1
+
     def get_view(self):
         return self
 
@@ -143,12 +156,6 @@ class XmlView(Gtk.ScrolledWindow):
             vbox.pack_start(self.sections[-1], True, True, 5)
         return vbox
 
-    def get_line_iterator(self):
-        for section in self.sections:
-            for sequence in section.sequences:
-                for line in sequence.lines:
-                    yield line
-
     def get_section(self):
         return int(self.current_section.name) - 1
 
@@ -163,5 +170,15 @@ class XmlView(Gtk.ScrolledWindow):
         self.set_section(int(current.name) - 1)
 
     def highlight(self, action):
-        pass
+        if hasattr(action, 'keyword'):
+            action.keyword.highlight()
+        else:
+            i = self.last_highlighted + 1
+            while i < len(self.keywords):
+                if self.keywords[i].action == action.text:
+                    self.keywords[i].highlight()
+                    self.last_highlighted = i
+                    break
+                i += 1
+