[英]TypeError:__init__() missing 2 required positional arguments
我目前遇到這個錯誤,但不知道它是由什麼引起的:我已經在代碼中聲明了位置參數 path2 和 path3,錯誤卻提示這兩個參數缺失。
錯誤消息: TypeError: __init__() missing 2 required positional arguments: 'path2' and 'path3'
這是我的代碼:
import os
from tqdm import tqdm
from utils import SOS, EOS, UNK, process
class Corpus(object):
    """Reads three tokenized text files (train/valid/test) and builds flat
    word lists plus a vocabulary for n-gram language modelling."""

    def __init__(self, path, path2, path3, order, lower=False, max_lines=-1):
        """
        Args:
            path: training-set file path (also defines the vocabulary).
            path2: validation-set file path.
            path3: test-set file path.
            order: n-gram order; order-1 SOS markers are prepended per line.
            lower: lowercase words when True (forwarded to utils.process).
            max_lines: stop after this many lines per file; -1 means no limit.
        """
        self.order = order
        self.lower = lower
        self.max_lines = max_lines
        self.vocab = set()
        # The training pass fills self.vocab; valid/test then map any word
        # not seen in training to UNK.
        self.train = self.tokenize(path, training_set=True)
        self.valid = self.tokenize(path2)
        self.test = self.tokenize(path3)

    def tokenize(self, path, training_set=False):
        """Tokenizes one text file and returns the flat list of words.

        Bug fix: open() takes a single file path (plus mode/encoding).
        The original code passed path2/path3 as extra positional arguments
        to open(), and those names are not even in scope in this method.
        """
        # First pass only counts lines so tqdm can show total progress;
        # iterate the file object directly instead of readlines() to avoid
        # loading the whole file into memory.
        with open(path, 'r', encoding="utf8") as fin:
            num_lines = sum(1 for _ in fin)
        with open(path, 'r', encoding="utf8") as f:
            words = []
            for i, line in enumerate(tqdm(f, total=num_lines)):
                if self.max_lines > 0 and i > self.max_lines:
                    break
                line = line.strip()
                if not line:
                    continue  # Skip empty lines.
                elif line.startswith('='):
                    continue  # Skip section headers (wikitext style).
                else:
                    sentence = (self.order - 1) * [SOS] + \
                        [process(word, self.lower) for word in line.split()] + [EOS]
                    if training_set:
                        words.extend(sentence)
                        self.vocab.update(sentence)
                    else:
                        # Replace out-of-vocabulary words with UNK.
                        sentence = [word if word in self.vocab else UNK for word in sentence]
                        words.extend(sentence)
            return words
if __name__ == '__main__':
    # Corpus.__init__ declares three required path parameters; the original
    # call Corpus(path, order=3) omitted path2 and path3, which is exactly
    # what raised:
    #   TypeError: __init__() missing 2 required positional arguments
    path = 'C://Users//supre//Documents//Python Programme//kenlm//wikitext-2//wiki.train.tokens'
    path2 = 'C://Users//supre//Documents//Python Programme//kenlm//wikitext-2//wiki.valid.tokens'
    path3 = 'C://Users//supre//Documents//Python Programme//kenlm//wikitext-2//wiki.test.tokens'
    corpus = Corpus(path, path2, path3, order=3)
    print(len(corpus.test))
    print(corpus.test[:100])
提前感謝您的每一個幫助和建議:)
在實例化 Corpus 類時,需要把這些參數全部傳入。
corpus = Corpus(path, path2, path3, order=3)
構造函數參數的名字與你傳入的變量名沒有任何關系,所以必須把所有必需的位置參數都顯式傳入;Python 沒有按同名變量自動取值的機制。
class Corpus(object):
    """Tokenized corpus with train/valid/test splits read from three files."""

    def __init__(self, path, path2, path3, order, lower=False, max_lines=-1):
        """Store settings, then tokenize the three splits in order.

        The training file is processed first because it populates the
        vocabulary used when tokenizing the validation and test files.
        """
        # Tokenization settings must be in place before any file is read.
        self.max_lines = max_lines
        self.lower = lower
        self.order = order
        self.vocab = set()
        # Training split first (builds self.vocab), then the held-out splits.
        self.train = self.tokenize(path, training_set=True)
        self.valid = self.tokenize(path2)
        self.test = self.tokenize(path3)
if __name__ == '__main__':
    # All three required file paths are passed explicitly, so __init__
    # receives path, path2 and path3 and no TypeError is raised.
    train_file = 'C://Users//...//kenlm//wikitext-2//wiki.train.tokens'
    valid_file = 'C://Users//...//kenlm//wikitext-2//wiki.valid.tokens'
    test_file = 'C://Users//...//kenlm//wikitext-2//wiki.test.tokens'
    corpus = Corpus(train_file, valid_file, test_file, order=3)
另外,只傳一個參數的 os.path.join 會原樣返回該路徑,沒有實際作用,可以直接去掉。
聲明:本站的技術帖子網頁,遵循CC BY-SA 4.0協議,如果您需要轉載,請注明本站網址或者原文地址。任何問題請咨詢:yoyou2525@163.com.