minor doc + pep8 updates
jayantj committed Jan 11, 2017
1 parent 2f37b04 commit b2ff794
Showing 3 changed files with 6 additions and 6 deletions.
4 changes: 2 additions & 2 deletions gensim/models/keyedvectors.py
@@ -53,7 +53,7 @@ def word_vec(self, word, use_norm=False):
        Example::

-         >>> trained_model['office']
+         >>> trained_model.word_vec('office', use_norm=True)
          array([ -1.40128313e-02, ...])

        """
@@ -323,7 +323,7 @@ def doesnt_match(self, words):

        if not words:
            raise ValueError("cannot select a word from an empty list")
-       logger.debug("using words %s" % words)
+       logger.debug("using words %s", words)
        vectors = []
        for word in words:
            try:
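The logging change above is more than cosmetic: with `%`-interpolation the message string is built even when DEBUG is disabled, whereas passing `words` as an argument defers interpolation until a handler actually emits the record. A standalone illustration:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    words = ['breakfast', 'cereal', 'dinner', 'lunch']
    logger.debug("using words %s" % words)   # eager: formats the string, then discards it
    logger.debug("using words %s", words)    # lazy: skips formatting since DEBUG is off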
2 changes: 1 addition & 1 deletion gensim/models/word2vec.py
@@ -470,7 +470,7 @@ def __init__(
            self.train(sentences)

    def initialize_word_vectors(self):
-       self.wv = KeyedVectors() # wv --> word vectors
+       self.wv = KeyedVectors()

    def make_cum_table(self, power=0.75, domain=2**31 - 1):
        """
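`make_cum_table` builds the cumulative-distribution table used for negative sampling: a uniform integer draw in `[0, domain)` is searched into the table, so each word is picked with probability proportional to `count**power`. A self-contained sketch of the idea (simplified; the names and NumPy formulation are illustrative, not the method body):

    import numpy as np

    def make_cum_table(counts, power=0.75, domain=2**31 - 1):
        weights = np.array(counts, dtype=np.float64) ** power
        return np.round(np.cumsum(weights) / weights.sum() * domain).astype(np.uint32)

    table = make_cum_table([100, 10, 1])                     # hypothetical word frequencies
    draw = np.random.randint(table[-1])                      # uniform draw in [0, domain)
    word_index = np.searchsorted(table, draw, side='right')  # frequent words win more often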
6 changes: 3 additions & 3 deletions gensim/models/wrappers/fasttext.py
@@ -139,7 +139,7 @@ class FastText(Word2Vec):
    """

    def initialize_word_vectors(self):
-       self.wv = FastTextKeyedVectors() # wv --> word vectors
+       self.wv = FastTextKeyedVectors()

    @classmethod
    def train(cls, ft_path, corpus_file, output_file=None, model='cbow', size=100, alpha=0.025, window=5, min_count=5,
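This wrapper's `train` does not train in Python; it shells out to a compiled fastText binary and then loads the resulting model. A hedged usage sketch (the paths are hypothetical, and only parameters visible in this diff are used):

    from gensim.models.wrappers import FastText

    model = FastText.train(
        ft_path='/usr/local/bin/fasttext',   # path to the compiled fastText executable
        corpus_file='/tmp/corpus.txt',       # plain-text training corpus
        model='cbow', size=100, min_count=5)
    print(model['office'])                   # lookup; OOV words fall back to character n-grams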
@@ -245,7 +245,7 @@ def delete_training_files(cls, model_file):

    def load_binary_data(self, model_binary_file):
        """Loads data from the output binary file created by FastText training"""
-       with open(model_binary_file, 'rb') as f:
+       with utils.smart_open(model_binary_file, 'rb') as f:
            self.load_model_params(f)
            self.load_dict(f)
            self.load_vectors(f)
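`utils.smart_open` behaves like the builtin `open` for ordinary local paths, but also transparently decompresses `.gz`/`.bz2` files and supports remote schemes, so the loader keeps working when the binary model is stored compressed. For example (file names hypothetical):

    from gensim import utils

    with utils.smart_open('/tmp/model.bin', 'rb') as f:     # plain local file
        header = f.read(4)
    with utils.smart_open('/tmp/model.bin.gz', 'rb') as f:  # decompressed on the fly
        header = f.read(4)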
@@ -329,7 +329,7 @@ def init_ngrams(self):
    @staticmethod
    def compute_ngrams(word, min_n, max_n):
        ngram_indices = []
-       BOW, EOW = ('<','>') # Used by FastText to attach to all words as prefix and suffix
+       BOW, EOW = ('<', '>') # Used by FastText to attach to all words as prefix and suffix
        extended_word = BOW + word + EOW
        ngrams = set()
        for i in range(len(extended_word) - min_n + 1):
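The hunk above is truncated; for intuition, here is a self-contained sketch of FastText-style character n-gram extraction. The resulting set matches the method's intent, though the loop structure is reconstructed, not copied:

    def compute_ngrams(word, min_n, max_n):
        BOW, EOW = ('<', '>')              # word-boundary markers, as in FastText
        extended_word = BOW + word + EOW
        ngrams = set()
        for n in range(min_n, max_n + 1):  # every n-gram length in [min_n, max_n]
            for i in range(len(extended_word) - n + 1):
                ngrams.add(extended_word[i:i + n])
        return ngrams

    print(sorted(compute_ngrams('cat', 3, 4)))
    # ['<ca', '<cat', 'at>', 'cat', 'cat>']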
