[{"title":"(13个子文件1.14MB)基于Keras的attention实战","children":[{"title":"keras-attention-mechanism-master","children":[{"title":"attention_dense.py <span style='color:#111;'>1.82KB</span>","children":null,"spread":false},{"title":"attention_lstm.py <span style='color:#111;'>3.35KB</span>","children":null,"spread":false},{"title":"attention_utils.py <span style='color:#111;'>2.55KB</span>","children":null,"spread":false},{"title":"requirements.txt <span style='color:#111;'>60B</span>","children":null,"spread":false},{"title":"LICENSE <span style='color:#111;'>11.09KB</span>","children":null,"spread":false},{"title":"assets","children":[{"title":"graph_single_attention.png <span style='color:#111;'>433.59KB</span>","children":null,"spread":false},{"title":"lstm_after.png <span style='color:#111;'>46.01KB</span>","children":null,"spread":false},{"title":"attention_1.png <span style='color:#111;'>210.93KB</span>","children":null,"spread":false},{"title":"1.png <span style='color:#111;'>44.91KB</span>","children":null,"spread":false},{"title":"lstm_before.png <span style='color:#111;'>50.41KB</span>","children":null,"spread":false},{"title":"graph_multi_attention.png <span style='color:#111;'>427.01KB</span>","children":null,"spread":false}],"spread":true},{"title":"README.md <span style='color:#111;'>4.77KB</span>","children":null,"spread":false},{"title":".gitignore <span style='color:#111;'>1.14KB</span>","children":null,"spread":false}],"spread":true}],"spread":true}]