diff --git a/UD_any/data/Makefile b/UD_any/data/Makefile
index 5dec568eff98a4c47e218cdeaf9ba223cd6e7364..49aea81e1f9fc5fbc29803d21d1b6d40a8d22e77 100644
--- a/UD_any/data/Makefile
+++ b/UD_any/data/Makefile
@@ -11,10 +11,8 @@ TEST_FILES=$(shell find $(CORPUS) -type f -name '*test*.conllu')
 #This part is for lemmatizer rules and exceptions computation
 THRESHOLD=10
 FPLM_FILENAME=fplm
-RULES_FILENAME=lemmatizer_rules.ts
-EXCEPTIONS_FPLM_FILENAME=maca_trans_lemmatizer_exceptions.fplm
 
-all: tokenizer.ts segmenter.ts texts all_no_test.conllu columns $(FPLM_FILENAME) $(RULES_FILENAME)
+all: tokenizer.ts segmenter.ts texts all_no_test.conllu columns
 	rm -f col_*\.txt
 	rm -f all_no_test.conllu
 
@@ -54,16 +52,9 @@ texts:
 $(FPLM_FILENAME): all_no_test.conllu $(MCD)
 	$(SCRIPTS)/conllu2fplm.py $< $(MCD) > $@
 
-$(RULES_FILENAME): $(FPLM_FILENAME)
-	macaon_compute_l_rules -f $(FPLM_FILENAME) -e $(EXCEPTIONS_FPLM_FILENAME) -r tmp.txt -t $(THRESHOLD)
-	rm -f tmp.txt
-	echo -e "Default : NOTHING\nTOLOWER b.0 LEMMA\nTOUPPER b.0 LEMMA" > lemmatizer_case.ts
-
 clean:
 	- rm -f *\.txt
 	- rm -f *\.conll*
 	- rm -f *\.ts
-	- rm -f $(RULES_FILENAME)
-	- rm -f $(EXCEPTIONS_FPLM_FILENAME)
 	- rm -f $(FPLM_FILENAME)