in lstm-text-generation/train_node.js [31:97]
/**
 * Parse command-line arguments for the LSTM text-generation training script.
 *
 * @returns {Object} Parsed arguments with the fields:
 *   - `textDatasetName` {string} Required positional; must be one of the
 *     keys of `TEXT_DATA_URLS`.
 *   - `gpu` {boolean} Whether to use the CUDA GPU backend for training.
 *   - `sampleLen` {number} Length of each input sequence, in characters.
 *   - `sampleStep` {number} Characters to skip between consecutive examples.
 *   - `learningRate` {number} Optimizer learning rate.
 *   - `epochs` {number} Number of training epochs.
 *   - `examplesPerEpoch` {number} Examples sampled from the text per epoch.
 *   - `batchSize` {number} Training batch size.
 *   - `validationSplit` {number} Fraction of data held out for validation.
 *   - `displayLength` {number} Length of sampled text shown after each epoch.
 *   - `savePath` {string|undefined} Optional path to save the model to.
 *   - `lstmLayerSize` {string} Comma-separated LSTM layer sizes
 *     (parsed into numbers by the caller, not here).
 */
function parseArgs() {
  // `ArgumentParser` must be constructed with `new`: the original call
  // without `new` returns undefined with argparse 1.x and throws with the
  // class-based argparse 2.x, so `parser.addArgument` below would fail.
  const parser = new argparse.ArgumentParser({
    description: 'Train an lstm-text-generation model.'
  });
  // Required positional argument: which built-in text corpus to train on.
  parser.addArgument('textDatasetName', {
    type: 'string',
    choices: Object.keys(TEXT_DATA_URLS),
    help: 'Name of the text dataset'
  });
  parser.addArgument('--gpu', {
    action: 'storeTrue',
    help: 'Use CUDA GPU for training.'
  });
  parser.addArgument('--sampleLen', {
    type: 'int',
    defaultValue: 60,
    help: 'Sample length: Length of each input sequence to the model, in ' +
    'number of characters.'
  });
  parser.addArgument('--sampleStep', {
    type: 'int',
    defaultValue: 3,
    help: 'Step length: how many characters to skip between one example ' +
    'extracted from the text data to the next.'
  });
  parser.addArgument('--learningRate', {
    type: 'float',
    defaultValue: 1e-2,
    help: 'Learning rate to be used during training'
  });
  parser.addArgument('--epochs', {
    type: 'int',
    defaultValue: 150,
    help: 'Number of training epochs'
  });
  parser.addArgument('--examplesPerEpoch', {
    type: 'int',
    defaultValue: 10000,
    help: 'Number of examples to sample from the text in each training epoch.'
  });
  parser.addArgument('--batchSize', {
    type: 'int',
    defaultValue: 128,
    help: 'Batch size for training.'
  });
  parser.addArgument('--validationSplit', {
    type: 'float',
    defaultValue: 0.0625,
    help: 'Validation split for training.'
  });
  parser.addArgument('--displayLength', {
    type: 'int',
    defaultValue: 120,
    help: 'Length of the sampled text to display after each epoch of training.'
  });
  parser.addArgument('--savePath', {
    type: 'string',
    help: 'Path to which the model will be saved (optional)'
  });
  // Accepted as a raw string; splitting the comma-separated sizes into an
  // array of ints is presumably done by the caller — TODO(cais): confirm
  // (the original trailing "TODO(cais): Support" comment was truncated).
  parser.addArgument('--lstmLayerSize', {
    type: 'string',
    defaultValue: '128,128',
    help: 'LSTM layer size. Can be a single number or an array of numbers ' +
    'separated by commas (E.g., "256", "256,128")'
  });
  return parser.parseArgs();
}