Minor bug fixes to get the last few changes to pass tests

Frank Seide 2019-05-15 11:43:20 -07:00
parent 80171f3ca6
commit 356f804293
8 changed files with 15 additions and 11 deletions

@@ -1 +1 @@
-Subproject commit 142eadddbe04493c1024b42586030b72e9cb7ea2
+Subproject commit 71b473f29017933e68a513f7262044c46e39cccc


@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # -*- coding: utf-8 -*-
 from __future__ import print_function


@@ -18,7 +18,7 @@ public:
   // construct with one or more individual parameters
   // Factory("var1", val1, "var2", val2, ...)
   template <typename T, typename... Args>
-  Factory(const std::string& key, T value, Args&&... moreArgs) {
+  Factory(const std::string& key, T value, Args&&... moreArgs) : Factory() {
     setOpts(key, value, std::forward<Args>(moreArgs)...);
   }
   // construct with options and one or more individual parameters
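Note: the only change here is the delegation to the default constructor. A minimal standalone sketch of why that matters (a simplified stand-in class, not the real marian Factory): the default constructor is what allocates the options object, so a constructor that sets options without delegating first would touch an uninitialized member.

#include <iostream>
#include <map>
#include <memory>
#include <string>

// Simplified stand-in, not the real marian Factory. The options map is only
// allocated by the default constructor, so the key/value constructor must
// delegate to it (": Factory()") before it touches options_.
class Factory {
public:
  Factory() : options_(std::make_shared<std::map<std::string, int>>()) {}

  Factory(const std::string& key, int value) : Factory() {  // delegate first,
    (*options_)[key] = value;                               // then options_ is safe to use
  }

  int get(const std::string& key) const { return options_->at(key); }

private:
  std::shared_ptr<std::map<std::string, int>> options_;
};

int main() {
  Factory f("dimEmb", 512);  // without the delegation, options_ would still be a null pointer here
  std::cout << f.get("dimEmb") << "\n";
  return 0;
}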


@@ -413,13 +413,17 @@ namespace marian {
       selectedEmbs = multiRows(words);
     else
       selectedEmbs = rows(E_, toWordIndexVector(words));
+    selectedEmbs = reshape(selectedEmbs, shape);
     selectedEmbs = dropout(selectedEmbs, options_->get<float>("dropout", 0.0f), {selectedEmbs->shape()[-3], 1, 1});
-    return reshape(selectedEmbs, shape);
+    return selectedEmbs;
   }
   Expr Embedding::applyIndices(const std::vector<WordIndex>& embIdx, const Shape& shape) const /*override final*/ {
     ABORT_IF(factoredVocab_, "Embedding: applyIndices must not be used with a factored vocabulary");
-    return reshape(rows(E_, embIdx), shape);
+    auto selectedEmbs = rows(E_, embIdx);
+    selectedEmbs = reshape(selectedEmbs, shape);
+    selectedEmbs = dropout(selectedEmbs, options_->get<float>("dropout", 0.0f), { selectedEmbs->shape()[-3], 1, 1 });
+    return selectedEmbs;
   }
   // standard encoder word embeddings
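Note: both paths now reshape to the target shape first and only then apply dropout with a mask of shape {selectedEmbs->shape()[-3], 1, 1}, i.e. one draw per entry along the first axis (presumably the word/time axis), broadcast over the remaining axes. A standalone illustration of what such a broadcast mask does, using plain arrays and the usual inverted-dropout scaling rather than the marian graph API:

#include <iostream>
#include <random>
#include <vector>

int main() {
  // Toy tensor of ones with layout [dimWords, dimBatch, dimEmb].
  const int dimWords = 4, dimBatch = 2, dimEmb = 3;
  const float p = 0.5f;                       // dropout probability
  std::vector<float> embs(dimWords * dimBatch * dimEmb, 1.0f);

  std::mt19937 rng(42);
  std::bernoulli_distribution keep(1.0f - p);

  // Mask of shape {dimWords, 1, 1}: one draw per word position, broadcast
  // over the batch and embedding axes, so whole word positions are zeroed.
  for (int w = 0; w < dimWords; ++w) {
    float m = keep(rng) ? 1.0f / (1.0f - p) : 0.0f;
    for (int b = 0; b < dimBatch; ++b)
      for (int e = 0; e < dimEmb; ++e)
        embs[(w * dimBatch + b) * dimEmb + e] *= m;
  }

  // Print one value per (word, batch) cell; rows of zeros are dropped words.
  for (int w = 0; w < dimWords; ++w) {
    for (int b = 0; b < dimBatch; ++b)
      std::cout << embs[(w * dimBatch + b) * dimEmb] << ' ';
    std::cout << '\n';
  }
  return 0;
}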


@@ -61,10 +61,10 @@ public:
   Expr seenEmb = graph_->get("Wpos");
   int numPos = seenEmb ? seenEmb->shape()[-2] : maxLength;
-  auto embeddingLayer = embedding()
-      ("prefix", "Wpos") // share positional embeddings across all encoders/decorders
-      ("dimVocab", numPos)
-      ("dimEmb", dimEmb)
+  auto embeddingLayer = embedding(
+       "prefix", "Wpos", // share positional embeddings across all encoders/decorders
+       "dimVocab", numPos,
+       "dimEmb", dimEmb)
       .construct(graph_);
   // fill with increasing numbers until current length or maxPos
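Note: this call site tracks the constructor change above — options are now passed as one flat key/value argument list instead of chained ("key", value) calls on a default-constructed factory. A self-contained sketch of how such a variadic interface peels off one pair at a time (names are illustrative, not the real marian factory code):

#include <iostream>
#include <string>
#include <utility>

class Factory {
public:
  Factory() = default;

  // Factory("k1", v1, "k2", v2, ...): take the first key/value pair,
  // forward the rest, delegating to the default constructor first.
  template <typename T, typename... Args>
  Factory(const std::string& key, T value, Args&&... moreArgs) : Factory() {
    setOpts(key, value, std::forward<Args>(moreArgs)...);
  }

private:
  template <typename T>
  void setOpts(const std::string& key, T value) {        // base case: last pair
    std::cout << key << " = " << value << "\n";
  }
  template <typename T, typename... Args>
  void setOpts(const std::string& key, T value, Args&&... moreArgs) {
    setOpts(key, value);                                  // handle the first pair
    setOpts(std::forward<Args>(moreArgs)...);             // recurse on the rest
  }
};

int main() {
  int numPos = 1024, dimEmb = 512;
  // Mirrors the shape of the new call in the diff: one flat argument list.
  Factory("prefix", "Wpos", "dimVocab", numPos, "dimEmb", dimEmb);
  return 0;
}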

src/tensors/cpu/prod.cpp  Normal file → Executable file (0 lines changed)


@@ -20,8 +20,8 @@ int main(int argc, char** argv) {
   for(int i = 0; i < 10; ++i) {
     g->clear();
-    auto mask1 = g->dropout(0.2, {10, 3072});
-    auto mask2 = g->dropout(0.3, {1, 3072});
+    auto mask1 = g->dropoutMask(0.2, {10, 3072});
+    auto mask2 = g->dropoutMask(0.3, {1, 3072});
     auto mask = mask1 + mask2;
     debug(mask1, "mask1");
     debug(mask2, "mask2");
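Note: the test now asks the graph for the mask itself (dropoutMask) rather than for a dropped-out tensor, which is presumably what lets it add and inspect mask1 and mask2 directly; the {1, 3072} mask then broadcasts the same pattern over every row it is applied to. A standalone sketch of that idea, outside the marian API:

#include <iostream>
#include <random>
#include <vector>

// Build a scaled Bernoulli dropout mask and return it, so the caller can
// reuse, combine, or broadcast it. Not the marian API, just the idea.
std::vector<float> dropoutMask(std::mt19937& rng, float p, int n) {
  std::bernoulli_distribution keep(1.0f - p);
  std::vector<float> mask(n);
  for (auto& m : mask)
    m = keep(rng) ? 1.0f / (1.0f - p) : 0.0f;  // inverted-dropout scaling
  return mask;
}

int main() {
  const int rows = 4, cols = 8;
  std::mt19937 rng(1234);

  // A {1, cols} mask: one pattern shared by every row it is broadcast over.
  auto rowMask = dropoutMask(rng, 0.3f, cols);

  std::vector<float> x(rows * cols, 1.0f);
  for (int r = 0; r < rows; ++r)
    for (int c = 0; c < cols; ++c)
      x[r * cols + c] *= rowMask[c];           // same columns dropped in every row

  for (int r = 0; r < rows; ++r) {
    for (int c = 0; c < cols; ++c)
      std::cout << x[r * cols + c] << ' ';
    std::cout << '\n';
  }
  return 0;
}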

vs/Marian.vcxproj  Normal file → Executable file (0 lines changed)