Supervised learning

M. Benesty

2018-01-04

library(fastrtext)

# Bundled example corpora: sentences with a single class each.
data("train_sentences")
data("test_sentences")

# prepare data
tmp_file_model <- tempfile()

# fastText's supervised format is one observation per line:
# "__label__<class> <text>"
train_labels <- sprintf("__label__%s", train_sentences[["class.text"]])
train_texts <- tolower(train_sentences[["text"]])
train_to_write <- paste(train_labels, train_texts)
train_tmp_file_txt <- tempfile()
writeLines(text = train_to_write, con = train_tmp_file_txt)

# Keep both the prefixed labels (for the fastText format) and the raw
# class names (for accuracy computation later in the script).
test_labels <- sprintf("__label__%s", test_sentences[["class.text"]])
test_labels_without_prefix <- test_sentences[["class.text"]]
test_texts <- tolower(test_sentences[["text"]])
test_to_write <- paste(test_labels, test_texts)

# learn model
# Train a supervised fastText classifier through fastrtext's
# command-line-style interface. Flags (standard fastText options):
#   -dim 20       : dimensionality of the learned vectors
#   -lr 1         : learning rate
#   -epoch 20     : passes over the training file
#   -wordNgrams 2 : include word bigrams as features
#   -verbose 1    : reduced console logging
# The trained model is written to tmp_file_model (a ".bin" extension is
# appended by fastText).
execute(commands = c("supervised", "-input", train_tmp_file_txt, "-output", tmp_file_model, "-dim", 20, "-lr", 1, "-epoch", 20, "-wordNgrams", 2, "-verbose", 1))
## Read 0M words
## Number of words:  5060
## Number of labels: 15
## Progress: 100.0% words/sec/thread: 1527406 lr:  0.000000 loss:  0.305070 ETA:   0h 0m
# load model
model <- load_model(tmp_file_model)
## add .bin extension to the path
# prediction are returned as a list with words and probabilities
predictions <- predict(model, sentences = test_to_write)
# inspect the first few predictions
first_predictions <- head(predictions, n = 5)
print(first_predictions)
## [[1]]
##      OWNX 
## 0.9996521 
## 
## [[2]]
##      MISC 
## 0.9899388 
## 
## [[3]]
##      MISC 
## 0.9953604 
## 
## [[4]]
##      OWNX 
## 0.8782121 
## 
## [[5]]
##      AIMX 
## 0.9875715
# Compute accuracy: each prediction element is a named probability, so the
# element names are the predicted classes; compare them with the gold labels.
predicted_labels <- names(unlist(predictions))
mean(predicted_labels == test_labels_without_prefix)
## [1] 0.8233333
# because there is only one category by observation, hamming loss will be the same
# get_hamming_loss() (fastrtext) takes the gold labels as a list parallel to
# the predictions list.
get_hamming_loss(as.list(test_labels_without_prefix), predictions)
## [1] 0.8233333
# test predictions
# Re-run prediction on the test sentences and show the first five results.
predictions <- predict(model, sentences = test_to_write)
print(utils::head(predictions, n = 5))
## [[1]]
##      OWNX 
## 0.9996521 
## 
## [[2]]
##      MISC 
## 0.9899388 
## 
## [[3]]
##      MISC 
## 0.9953604 
## 
## [[4]]
##      OWNX 
## 0.8782121 
## 
## [[5]]
##      AIMX 
## 0.9875715
# you can get flat list of results when you are retrieving only one label per observation
# With simplify = TRUE the result is a named numeric vector rather than a list.
flat_predictions <- predict(model, sentences = test_to_write, simplify = TRUE)
print(head(flat_predictions))
##      OWNX      MISC      MISC      OWNX      AIMX      CONT 
## 0.9996521 0.9899388 0.9953604 0.8782121 0.9875715 0.4825342
# free memory: delete both temporary files (unlink is vectorized),
# drop the model object, then trigger garbage collection
unlink(c(train_tmp_file_txt, tmp_file_model))
rm(model)
gc()
##           used (Mb) gc trigger (Mb) max used (Mb)
## Ncells  553859 29.6     940480 50.3   940480 50.3
## Vcells 1142900  8.8    1943194 14.9  1548727 11.9