mirror of https://github.com/guillaume-be/rust-bert.git
synced 2024-10-26 14:07:25 +03:00
Added integration test for OpenAI GPT
This commit is contained in:
parent d820ea8eea
commit 89b486fb12
@@ -66,7 +66,6 @@ fn main() -> failure::Fallible<()> {
    let next_word_id = output.get(0).get(-1).argmax(-1, true).int64_value(&[0]);
    let next_word = tokenizer.decode(vec!(next_word_id), true, true);
    println!("Output: {:?}", output);
    println!("Provided input: {}", input[0]);
    println!("Next word: {}", next_word);
tests/openai_gpt.rs (61 lines, new file)
@@ -0,0 +1,61 @@
use std::path::PathBuf;
use tch::{Device, nn, Tensor};
use rust_tokenizers::{TruncationStrategy, Tokenizer, OpenAiGptTokenizer};
use rust_bert::gpt2::gpt2::Gpt2Config;
use rust_bert::common::config::Config;
use rust_bert::openai_gpt::openai_gpt::OpenAIGPTLMHeadModel;

#[test]
fn openai_gpt_lm_model() -> failure::Fallible<()> {
    // Resources paths
    let mut home: PathBuf = dirs::home_dir().unwrap();
    home.push("rustbert");
    home.push("openai-gpt");
    let config_path = &home.as_path().join("config.json");
    let vocab_path = &home.as_path().join("vocab.txt");
    let merges_path = &home.as_path().join("merges.txt");
    let weights_path = &home.as_path().join("model.ot");

    // Set-up masked LM model
    let device = Device::Cpu;
    let mut vs = nn::VarStore::new(device);
    let tokenizer = OpenAiGptTokenizer::from_file(vocab_path.to_str().unwrap(), merges_path.to_str().unwrap(), true);
    let config = Gpt2Config::from_file(config_path);
    let openai_gpt = OpenAIGPTLMHeadModel::new(&vs.root(), &config);
    vs.load(weights_path)?;

    // Define input
    let input = ["Wondering what the next word will"];
    let tokenized_input = tokenizer.encode_list(input.to_vec(), 128, &TruncationStrategy::LongestFirst, 0);
    let max_len = tokenized_input.iter().map(|input| input.token_ids.len()).max().unwrap();
    let tokenized_input = tokenized_input
        .iter()
        .map(|input| input.token_ids.clone())
        .map(|mut input| {
            input.extend(vec![0; max_len - input.len()]);
            input
        })
        .map(|input| Tensor::of_slice(&input))
        .collect::<Vec<_>>();
    let input_tensor = Tensor::stack(tokenized_input.as_slice(), 0).to(device);

    // Forward pass
    let (output, _, _) = openai_gpt.forward_t(
        &Some(input_tensor),
        &None,
        &None,
        &None,
        &None,
        false).unwrap();

    let next_word_id = output.get(0).get(-1).argmax(-1, true).int64_value(&[0]);
    let next_word = tokenizer.decode(vec!(next_word_id), true, true);

    assert_eq!(output.size(), vec!(1, 6, 40478));
    assert!((output.double_value(&[0, output.size()[1] - 1, next_word_id]) - (9.1056)).abs() < 1e-4);
    assert_eq!(next_word_id, 580i64);
    assert_eq!(next_word, String::from("be"));

    Ok(())
}
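The input-preparation chain in the middle of the test right-pads every tokenized sequence with token id 0 up to the longest sequence in the batch, then stacks the resulting 1-D tensors into a single [batch, max_len] input tensor. A minimal standalone sketch of that padding-and-stacking step, with hypothetical hard-coded token ids standing in for the tokenizer output:

use tch::Tensor;

fn main() {
    // Hypothetical token id sequences standing in for the tokenizer output.
    let sequences: Vec<Vec<i64>> = vec![vec![15, 8, 42], vec![7, 3]];
    let max_len = sequences.iter().map(|s| s.len()).max().unwrap();

    // Right-pad each sequence with 0 and convert it to a 1-D tensor.
    let padded = sequences
        .into_iter()
        .map(|mut s| {
            s.extend(vec![0; max_len - s.len()]);
            Tensor::of_slice(&s)
        })
        .collect::<Vec<_>>();

    // Stack along a new leading dimension: shape [2, 3].
    let batch = Tensor::stack(padded.as_slice(), 0);
    assert_eq!(batch.size(), vec![2, 3]);
}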
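The final lookup in the test is a greedy next-token selection: the model output has shape [batch, sequence_length, vocab_size] (asserted above as [1, 6, 40478]), and the predicted word id is the argmax over the vocabulary logits at the last sequence position. A sketch of that step in isolation, where the function name and the assumption that `logits` has that layout are illustrative rather than part of the commit:

use tch::Tensor;

// `logits` is assumed to have shape [batch, sequence_length, vocab_size].
fn greedy_next_token(logits: &Tensor) -> i64 {
    logits
        .get(0)            // first batch element -> [sequence_length, vocab_size]
        .get(-1)           // logits at the last position -> [vocab_size]
        .argmax(-1, true)  // index of the highest logit, kept as a tensor
        .int64_value(&[0]) // extract that index as a plain i64
}

With the pretrained OpenAI GPT resources converted and placed under ~/rustbert/openai-gpt (config.json, vocab.txt, merges.txt and model.ot, as listed in the test), this integration test can be run on its own with cargo test --test openai_gpt.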