First, a small test of tf.nn.embedding_lookup:

import tensorflow as tf

# word_embeddings is a 2 by 2 matrix; each factor is randomized.
word_embeddings = tf.get_variable("word_embeddings_test", [2, 2])
# In here, 1 means the row at index 1 of the word_embeddings matrix.
embedded_word_ids = tf.nn.embedding_lookup(word_embeddings, ids=1)
init = tf.global_variables_initializer()

with tf.Session() as sess:
    sess.run(init)
    embedding_result_test, embedded_word_result_test = sess.run([word_embeddings, embedded_word_ids])
    print("= the test result =\n")
    print("The result of word_embedding:\n", embedding_result_test, "\n")
    print("The result of embedded_word_ids:\n", embedded_word_result_test, "\n")

Let's see the result of the computation graph above. When you execute it, tf.nn.embedding_lookup(matrix, ids) returns the rows of the matrix selected by ids; here the row at index 1 of the word_embeddings matrix is returned.

If ids is 1, the return value is word_embeddings[1].
If ids is a list such as [0, 1], the return value is [word_embeddings[0], word_embeddings[1]].
In other words, if you pass ids as a list, the return value contains one row of the word_embeddings matrix for each element of the list (a short check of the list case is given at the end of this post).

Now, how to use TensorBoard for a word embedding:

# How to use tensorboard for WordEmbedding.
import os
import tensorflow as tf

LOG_DIR = os.path.join(os.getcwd(), "log/")
EMBEDDING_SIZE = 3
VOCA_SIZE = 3
print("the current working directory:", LOG_DIR, "\n")

# word vector of embedding (random values standing in for trained word vectors).
embedding_input = tf.random_uniform([VOCA_SIZE, EMBEDDING_SIZE], dtype=tf.float32, name="input_embedding-no-label")
# For input of the Tensorboard Projector.
embeddings = tf.Variable(tf.zeros([VOCA_SIZE, EMBEDDING_SIZE], name="embedding_intial_tensor"), name="real_Embedding")
assignment = embeddings.assign(embedding_input)
# To add ops to save and restore all the variables.
saver = tf.train.Saver()

with tf.Session() as sess:
    writer = tf.summary.FileWriter(LOG_DIR, sess.graph)
    # To initialize all the variables in the default graph.
    global_init_op = tf.global_variables_initializer()
    sess.run(global_init_op)
    # print embedding_input
    embedding_input_, embeddings_ = sess.run([embedding_input, embeddings])
    print("embedding_input:\n", embedding_input_)
    print("embeddings:\n", embeddings_)
    assignment_, embeddings2_ = sess.run([assignment, embeddings])
    print("assignment:\n", assignment_)
    print("embeddings2:\n", embeddings2_)
    save_path1 = saver.save(sess, os.path.join(LOG_DIR, "test_embedding_model.ckpt"), global_step=0)
    print("\nModel Saved in file: %s" % save_path1)
    writer.close()

Here you have to save a checkpoint file so that all the variables of your model are stored. Once the checkpoint file has been saved completely, move to the directory you saved it to and start TensorBoard from the command line like this:

tensorboard --logdir=the-path-you-saved-checkpoint-file-to

As you can see in the code, there is no label for each word, so on the TensorBoard Projector each word is represented only by its index number.

Let's try another one, this time with labels for the embedding:

import os
import tensorflow as tf

LOG_DIR = os.path.join(os.getcwd(), "log/")
EMBEDDING_SIZE = 3
VOCA_SIZE = 3
print("the current working directory:", LOG_DIR, "\n")
LABEL_FILE = os.path.join(os.getcwd(), "log/labels.tsv")
print("the current LABEL_FILE:", LABEL_FILE, "\n")
LABEL_NUM = 3
LABEL = ["word_0", "word_1", "word_2"]  # one label string per embedding row (placeholder values)

# Write label file.
with open(LABEL_FILE, "w") as f:
    for i in range(LABEL_NUM):
        f.write(LABEL[i] + "\n")
print("labels.tsv file Created!\n")

After writing the label file, the script builds the word vector of the embedding (embeddings = tf.Variable(...)) and saves a checkpoint just as in the previous example.
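Writing labels.tsv by itself is not enough for the Projector to pick the labels up: the file has to be registered as metadata for the embedding tensor. The snippet below is a minimal sketch of that step, assuming the TensorFlow 1.x tf.contrib.tensorboard.plugins.projector API and the embeddings variable, the FileWriter, and LABEL_FILE defined in the scripts above.

from tensorflow.contrib.tensorboard.plugins import projector

# Point the Projector at the embedding tensor and its metadata (label) file.
config = projector.ProjectorConfig()
embedding_conf = config.embeddings.add()
embedding_conf.tensor_name = embeddings.name   # the "real_Embedding" variable from above
embedding_conf.metadata_path = LABEL_FILE      # the labels.tsv written earlier
# Writes projector_config.pbtxt into LOG_DIR so TensorBoard can find the labels.
projector.visualize_embeddings(writer, config)

After saving the checkpoint and running tensorboard --logdir=... again, each point in the Projector should show its word from labels.tsv instead of a bare index number.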
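Finally, coming back to tf.nn.embedding_lookup: the list behaviour described at the top can be checked with a few extra lines. This is only a small sketch in the same TF 1.x session style as the examples above; the variable name word_embeddings_list_test is just illustrative.

import tensorflow as tf

# A small 2 by 2 embedding matrix, randomly initialized.
word_embeddings = tf.get_variable("word_embeddings_list_test", [2, 2])
# Passing a list of ids returns one row per id, in the given order.
embedded_rows = tf.nn.embedding_lookup(word_embeddings, ids=[0, 1])

with tf.Session() as sess:
    sess.run(tf.global_variables_initializer())
    print("rows 0 and 1 of the matrix:\n", sess.run(embedded_rows))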