core/utils/harvesters.py (5 changes: 3 additions & 2 deletions)
@@ -73,7 +73,7 @@ def harvest_documents(self) -> Generator[Dict[str, Any], None, None]:
         logging.info(f"Fetching AM documents from: {url}")
 
         # Make the request
-        response = fetch_data(url, json=True, timeout=self.timeout, verify=True)
+        response = fetch_data(url, json=True, timeout=self.timeout, verify=False)
 
         # Process the returned objects
         objects = response.get("objects", [])
@@ -198,7 +198,8 @@ def harvest_documents(self) -> Generator[Dict[str, Any], None, None]:
         logging.info(f"Fetching OPAC documents from: {url}")
 
         # Make the request
-        response = fetch_data(url, json=True, timeout=self.timeout, verify=True)
+        # verify=False is needed to avoid SSL errors in environments where the OPAC certificate is not recognized
+        response = fetch_data(url, json=True, timeout=self.timeout, verify=False)
 
         # Set the total number of pages on the first iteration
         if total_pages is None:
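Note: fetch_data is the project's HTTP helper. A minimal sketch of how such a helper might forward the verify flag to requests is below; the signature is an assumption for illustration, not the actual core.utils implementation. Since urllib3 emits an InsecureRequestWarning on every unverified request, the sketch also silences that warning to keep harvest logs readable.

# Minimal sketch of a fetch_data helper that forwards verify to requests.
# Assumed for illustration; the real helper in core.utils may differ.
import requests
import urllib3

# With verify=False, urllib3 emits an InsecureRequestWarning per request;
# silence it so harvest logs stay readable.
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


def fetch_data(url, json=False, timeout=30, verify=True):
    # verify is passed straight through to requests.get; False disables
    # TLS certificate validation entirely.
    response = requests.get(url, timeout=timeout, verify=verify)
    response.raise_for_status()
    return response.json() if json else response.content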
issue/articlemeta/loader.py (2 changes: 1 addition & 1 deletion)
@@ -69,7 +69,7 @@ def harvest_and_load_issue(user, url, code, collection_acron, processing_date, f
 def harvest_issue_data(url, timeout=30):
     try:
         item = {}
-        item["data"] = utils.fetch_data(url, json=True, timeout=timeout, verify=True)
+        item["data"] = utils.fetch_data(url, json=True, timeout=timeout, verify=False)
         item["status"] = "pending"
         return item
     except Exception as e:
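Usage sketch for harvest_issue_data as changed above: on success it returns a dict with the fetched payload and a "pending" status. The except branch is truncated in this diff, so the failure handling below is an assumption; the URL is a hypothetical example and the import path is inferred from the file path.

# Hypothetical usage; the URL, import path, and failure handling are assumptions.
from issue.articlemeta.loader import harvest_issue_data

url = "https://articlemeta.scielo.org/api/v1/issue/?collection=scl&code=..."
item = harvest_issue_data(url, timeout=30)
if item and item.get("status") == "pending":
    issue_data = item["data"]  # raw ArticleMeta JSON payload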
journal/sources/article_meta.py (4 changes: 2 additions & 2 deletions)
@@ -25,7 +25,7 @@ def _get_collection_journals(offset=None, limit=None, collection=None):
         f"https://articlemeta.scielo.org/api/v1/journal/identifiers/?collection={collection}&limit={limit}"
         + offset
     )
-    data = fetch_data(url, json=True, timeout=30, verify=True)
+    data = fetch_data(url, json=True, timeout=30, verify=False)
     return data


@@ -37,7 +37,7 @@ def process_journal_article_meta(collection, limit, user):
     for journal in data["objects"]:
         issn = journal["code"]
         url_journal = f"https://articlemeta.scielo.org/api/v1/journal/?collection={collection}&issn={issn}"
-        data_journal = fetch_data(url_journal, json=True, timeout=30, verify=True)
+        data_journal = fetch_data(url_journal, json=True, timeout=30, verify=False)
         obj_collection = Collection.objects.get(acron3=collection)
         AMJournal.create_or_update(
             pid=issn,
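A note on the design choice: requests' verify parameter also accepts a path to a CA bundle, so an alternative to disabling validation outright is to trust the servers' certificate chain explicitly, either per call or via the REQUESTS_CA_BUNDLE environment variable. A sketch under that assumption (the bundle path is hypothetical):

# Alternative sketch: keep TLS validation but trust a custom CA bundle.
import requests

CA_BUNDLE = "/etc/ssl/certs/custom-ca.pem"  # hypothetical path

data = requests.get(
    "https://articlemeta.scielo.org/api/v1/journal/identifiers/?collection=scl&limit=10",
    timeout=30,
    verify=CA_BUNDLE,  # validate against the supplied bundle instead of skipping checks
).json()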