Publication date: Oct 23, 2024
LookupError // Werkzeug Debugger

LookupError:
**********************************************************************
  Resource punkt_tab not found.
  Please use the NLTK Downloader to obtain the resource:

  >>> import nltk
  >>> nltk.download('punkt_tab')

  For more information see: https://www.nltk.org/data.html

  Attempted to load tokenizers/punkt_tab/english/

  Searched in:
    - '/root/nltk_data'
    - '/usr/local/nltk_data'
    - '/usr/local/share/nltk_data'
    - '/usr/local/lib/nltk_data'
    - '/usr/share/nltk_data'
    - '/usr/local/share/nltk_data'
    - '/usr/lib/nltk_data'
    - '/usr/local/lib/nltk_data'
**********************************************************************
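The error message already contains the immediate fix: the punkt_tab tokenizer tables are not installed in any of the directories NLTK searches. A minimal sketch of that fix, run once in the same environment that serves the app; the stopwords download is an extra assumption, added only because the failing view also calls nltk.corpus.stopwords.words() further down the traceback and may need it too:

```python
# Run once in the container/virtualenv that serves the Flask app.
# Assumes outbound network access to NLTK's download servers.
import nltk

nltk.download("punkt_tab")   # the missing Punkt sentence-tokenizer tables
nltk.download("stopwords")   # assumption: also used by the view shown in the traceback
```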
Traceback (most recent call last)
-
File "/usr/local/lib/python3.9/site-packages/flask/app.py", line 1498, in __call__
    ) -> cabc.Iterable[bytes]:
        """The WSGI server calls the Flask application object as the
        WSGI application. This calls :meth:`wsgi_app`, which can be
        wrapped to apply middleware.
        """
        return self.wsgi_app(environ, start_response)
-
File "/usr/local/lib/python3.9/site-packages/flask/app.py", line 1476, in wsgi_app
            try:
                ctx.push()
                response = self.full_dispatch_request()
            except Exception as e:
                error = e
                response = self.handle_exception(e)
            except:  # noqa: B001
                error = sys.exc_info()[1]
                raise
            return response(environ, start_response)
        finally:
-
File "/usr/local/lib/python3.9/site-packages/flask/app.py", line 1473, in wsgi_app
        ctx = self.request_context(environ)
        error: BaseException | None = None
        try:
            try:
                ctx.push()
                response = self.full_dispatch_request()
            except Exception as e:
                error = e
                response = self.handle_exception(e)
            except:  # noqa: B001
                error = sys.exc_info()[1]
-
File "/usr/local/lib/python3.9/site-packages/flask/app.py", line 882, in full_dispatch_request
            request_started.send(self, _async_wrapper=self.ensure_sync)
            rv = self.preprocess_request()
            if rv is None:
                rv = self.dispatch_request()
        except Exception as e:
            rv = self.handle_user_exception(e)
        return self.finalize_request(rv)

    def finalize_request(
        self,
        rv: ft.ResponseReturnValue | HTTPException,
-
File "/usr/local/lib/python3.9/site-packages/flask/app.py", line 880, in full_dispatch_request
        try:
            request_started.send(self, _async_wrapper=self.ensure_sync)
            rv = self.preprocess_request()
            if rv is None:
                rv = self.dispatch_request()
        except Exception as e:
            rv = self.handle_user_exception(e)
        return self.finalize_request(rv)

    def finalize_request(
-
File "/usr/local/lib/python3.9/site-packages/flask/app.py", line 865, in dispatch_request
            and req.method == "OPTIONS"
        ):
            return self.make_default_options_response()
        # otherwise dispatch to the handler for that endpoint
        view_args: dict[str, t.Any] = req.view_args  # type: ignore[assignment]
        return self.ensure_sync(self.view_functions[rule.endpoint])(**view_args)  # type: ignore[no-any-return]

    def full_dispatch_request(self) -> Response:
        """Dispatches the request and on top of that performs request
        pre and postprocessing as well as HTTP exception catching and
        error handling.
-
File "/nlpapps/app.py", line 451, in process_summarytext
    # Find weighted frequency of occurrence
    stopwords = nltk.corpus.stopwords.words('english')
    word_frequencies = {}
    for word in nltk.word_tokenize(formatted_article_text):
        if word not in stopwords:
            if word not in word_frequencies.keys():
                word_frequencies[word] = 1
            else:
                word_frequencies[word] += 1
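The application frame above is where a guard could live: check that the required resources are present before tokenizing, and fetch them on demand if they are not. This is only a sketch under the assumption that the serving process is allowed to download data at runtime; the helper name is hypothetical and not taken from /nlpapps/app.py:

```python
# Hypothetical helper, not part of /nlpapps/app.py: make sure the NLTK
# resources used by process_summarytext() exist before tokenizing.
import nltk


def ensure_nltk_resources() -> None:
    for resource_path, package in [
        ("tokenizers/punkt_tab/english/", "punkt_tab"),
        ("corpora/stopwords", "stopwords"),
    ]:
        try:
            nltk.data.find(resource_path)   # raises LookupError when missing
        except LookupError:
            nltk.download(package, quiet=True)
```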
-
File "/usr/local/lib/python3.9/site-packages/nltk/tokenize/__init__.py", line 142, in word_tokenize
    :param language: the model name in the Punkt corpus
    :type language: str
    :param preserve_line: A flag to decide whether to sentence tokenize the text or not.
    :type preserve_line: bool
    """
    sentences = [text] if preserve_line else sent_tokenize(text, language)
    return [
        token for sent in sentences for token in _treebank_word_tokenizer.tokenize(sent)
    ]
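The frame above also shows why word tokenization needs the Punkt data at all: unless preserve_line=True is passed, word_tokenize() first runs sent_tokenize(), which loads punkt_tab. Skipping sentence splitting avoids that lookup, but only makes sense when the input should be treated as a single sentence; a small sketch of the difference:

```python
import nltk

text = "Metformin was studied in a randomized trial. Long COVID was the outcome."

# Default path: sent_tokenize() -> PunktTokenizer -> requires punkt_tab on disk.
tokens = nltk.word_tokenize(text)

# preserve_line=True skips sent_tokenize(), so the Punkt tables are never loaded
# and the whole string is treated as one sentence.
tokens_single = nltk.word_tokenize(text, preserve_line=True)
```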
-
File "/usr/local/lib/python3.9/site-packages/nltk/tokenize/__init__.py", line 119, in sent_tokenize
    for the specified language).

    :param text: text to split into sentences
    :param language: the model name in the Punkt corpus
    """
    tokenizer = _get_punkt_tokenizer(language)
    return tokenizer.tokenize(text)


# Standard word tokenizer.
_treebank_word_tokenizer = NLTKWordTokenizer()
-
File "/usr/local/lib/python3.9/site-packages/nltk/tokenize/__init__.py", line 105, in _get_punkt_tokenizer
    a lru cache for performance.

    :param language: the model name in the Punkt corpus
    :type language: str
    """
    return PunktTokenizer(language)


# Standard sentence tokenizer.
def sent_tokenize(text, language="english"):
    """
-
File "/usr/local/lib/python3.9/site-packages/nltk/tokenize/punkt.py", line 1744, in __init__
    Punkt Sentence Tokenizer that loads/saves its parameters from/to data files
    """

    def __init__(self, lang="english"):
        PunktSentenceTokenizer.__init__(self)
        self.load_lang(lang)

    def load_lang(self, lang="english"):
        from nltk.data import find

        lang_dir = find(f"tokenizers/punkt_tab/{lang}/")
-
File "/usr/local/lib/python3.9/site-packages/nltk/tokenize/punkt.py", line 1749, in load_lang
        self.load_lang(lang)

    def load_lang(self, lang="english"):
        from nltk.data import find

        lang_dir = find(f"tokenizers/punkt_tab/{lang}/")
        self._params = load_punkt_params(lang_dir)
        self._lang = lang

    def save_params(self):
        save_punkt_params(self._params, dir=f"/tmp/{self._lang}")
-
File "/usr/local/lib/python3.9/site-packages/nltk/data.py", line 579, in find
        )
        msg += "\n  Searched in:" + "".join("\n    - %r" % d for d in paths)
        sep = "*" * 70
        resource_not_found = f"\n{sep}\n{msg}\n{sep}\n"
        raise LookupError(resource_not_found)


def retrieve(resource_url, filename=None, verbose=True):
    """
    Copy the given resource to a local file. If no filename is
LookupError: Resource punkt_tab not found. Please use the NLTK Downloader to obtain the resource: >>> import nltk >>> nltk.download('punkt_tab'). For more information see: https://www.nltk.org/data.html. Attempted to load tokenizers/punkt_tab/english/
**********************************************************************
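The punkt.py and data.py frames show where the lookup actually happens: PunktTokenizer.load_lang() calls nltk.data.find("tokenizers/punkt_tab/english/"), which walks the directory list printed in the message at the top of the page. When downloading into one of those default locations is not an option, the data can be placed in a custom directory and that directory added to the search path; the /nlpapps/nltk_data path below is only an illustration, not something the app is known to use:

```python
# Sketch: keep the NLTK data next to the application instead of under
# /root or /usr/local. The directory name here is an assumption.
import nltk

custom_dir = "/nlpapps/nltk_data"
nltk.download("punkt_tab", download_dir=custom_dir)   # writes tokenizers/punkt_tab/...
nltk.data.path.append(custom_dir)                     # make nltk.data.find() search it
```

Setting the NLTK_DATA environment variable to the same directory has the equivalent effect for processes that cannot modify nltk.data.path before the first tokenizer call.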
Concepts | Keywords |
---|---|
Administrationapproved | Bramante |
Diabetes | Control |
Hispanic | Controlled |
Microbiology | Cov |
Saturday | Covid |
 | Diabetes |
 | Electronic |
 | Erbelding |
 | Led |
 | Long |
 | Metformin |
 | National |
 | Randomized |
 | Sars |
 | Trial |
Semantics
Type | Source | Name |
---|---|---|
drug | DRUGBANK | Coenzyme M |
disease | MESH | Allergy |
disease | MESH | infection |
disease | MESH | viral load |
disease | MESH | type 2 diabetes |
disease | MESH | contraindication |
drug | DRUGBANK | Montelukast |
drug | DRUGBANK | Ivermectin |
drug | DRUGBANK | Fluticasone |
drug | DRUGBANK | Fluvoxamine |
disease | IDO | intervention |
disease | MESH | obesity |
drug | DRUGBANK | Methylphenidate |
pathway | REACTOME | Infectious disease |
disease | MESH | Infectious Disease |
disease | MESH | prediabetes |
disease | MESH | death |
drug | DRUGBANK | Metformin |
disease | MESH | Long COVID |
disease | MESH | SARS-CoV-2 infection |
pathway | REACTOME | SARS-CoV-2 Infection |