diff --git a/.github/workflows/python-lint.yaml b/.github/workflows/python-lint.yaml
index c1b13c6d..daa24d19 100644
--- a/.github/workflows/python-lint.yaml
+++ b/.github/workflows/python-lint.yaml
@@ -13,7 +13,7 @@ on:
- 'test/**.py'
jobs:
- build:
+ lint:
runs-on: ubuntu-latest
steps:
diff --git a/.github/workflows/python-pytest.yaml b/.github/workflows/python-pytest.yaml
index aabbf9df..3e3bbfa5 100644
--- a/.github/workflows/python-pytest.yaml
+++ b/.github/workflows/python-pytest.yaml
@@ -18,8 +18,8 @@ on:
- 'poetry.lock'
jobs:
- build:
- name: pytest ${{ matrix.python-version }}
+ pytest:
+ name: pytest ${{ matrix.python-version }}/${{ matrix.mediawiki-version }}
runs-on: ubuntu-latest
container: ubuntu
@@ -32,6 +32,7 @@ jobs:
strategy:
matrix:
python-version: [ '3.9', '3.10', '3.11', '3.12', '3.13', '3.14-dev' ]
+ mediawiki-version: [ '1.39.7-wmde.18', '1.40.3-wmde.19', '1.41.1-wmde.20' ]
steps:
- uses: actions/checkout@v5
@@ -49,6 +50,13 @@ jobs:
- name: Install poetry
run: pipx install poetry
+ - name: Set up Docker containers with v${{ matrix.mediawiki-version }}
+ run: |
+ docker compose -f ./docker-compose.yml -f ./docker-compose.extra.yml up -d
+ working-directory: test/docker-compose
+ env:
+ WIKIBASE_BUNDLE_IMAGE_NAME: wikibase/wikibase-bundle:${{ matrix.mediawiki-version }}
+
- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v6.0.0
with:
diff --git a/pyproject.toml b/pyproject.toml
index 2cfeab27..58aedbf7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -52,6 +52,7 @@ optional = true
[tool.poetry.group.dev.dependencies]
pytest = "*"
+pytest-order = "*"
pylint = "*"
pylint-exit = "*"
mypy = "*"
diff --git a/test/docker-compose/.env b/test/docker-compose/.env
new file mode 100644
index 00000000..90c67614
--- /dev/null
+++ b/test/docker-compose/.env
@@ -0,0 +1,39 @@
+## Example / Template .env file for Wikibase release pipeline docker-compose example
+# WARNING: Do not add comments on the same line as env vars, as in some environments they will be included in the var!
+
+## Image Configuration
+WDQS_IMAGE_NAME=wikibase/wdqs:0.3.135-wmde.18
+WDQS_FRONTEND_IMAGE_NAME=wikibase/wdqs-frontend:wmde.18
+WDQS_PROXY_IMAGE_NAME=wikibase/wdqs-proxy:wmde.18
+MYSQL_IMAGE_NAME=mariadb:10.11
+
+## Mediawiki Configuration
+## Admin password
+## Passwords must be at least 10 characters.
+## Your password must be different from your username.
+## Your password must not appear within your username.
+## The password must not be in a list of very commonly used passwords. Please choose a unique password.
+MW_ADMIN_PASS=change-this-password
+MW_ADMIN_NAME=admin
+MW_ADMIN_EMAIL=admin@example.com
+MW_SECRET_KEY=some-secret-key
+MW_WG_ENABLE_UPLOADS=false
+
+## Jobrunner Configuration
+MAX_JOBS=1
+
+## Database Configuration
+DB_NAME=my_wiki
+DB_USER=sqluser
+DB_PASS=change-this-sqlpassword
+
+## Wikibase Configuration
+WIKIBASE_PINGBACK=false
+# wikibase.svc is the internal docker hostname, change this value to the public hostname
+WIKIBASE_HOST=wikibase.svc
+WIKIBASE_PORT=80
+
+## WDQS-frontend Configuration
+# wdqs-frontend.svc is the internal docker hostname, change this value to the public hostname
+WDQS_FRONTEND_HOST=wdqs-frontend.svc
+WDQS_FRONTEND_PORT=8834
diff --git a/test/docker-compose/README.md b/test/docker-compose/README.md
new file mode 100644
index 00000000..f2256318
--- /dev/null
+++ b/test/docker-compose/README.md
@@ -0,0 +1 @@
+Taken from https://github.com/wmde/wikibase-release-pipeline/tree/main/example
diff --git a/test/docker-compose/docker-compose.extra.yml b/test/docker-compose/docker-compose.extra.yml
new file mode 100644
index 00000000..33aec078
--- /dev/null
+++ b/test/docker-compose/docker-compose.extra.yml
@@ -0,0 +1,69 @@
+# Additional services example
+version: '3.4'
+
+services:
+
+ wdqs-frontend:
+ image: "${WDQS_FRONTEND_IMAGE_NAME}"
+ restart: unless-stopped
+ ports:
+ - "${WDQS_FRONTEND_PORT}:80"
+ depends_on:
+ - wdqs-proxy
+ networks:
+ default:
+ aliases:
+ - ${WDQS_FRONTEND_HOST}
+ environment:
+ - WIKIBASE_HOST=${WIKIBASE_HOST}
+ - WDQS_HOST=wdqs-proxy.svc
+ wdqs:
+ image: "${WDQS_IMAGE_NAME}"
+ restart: unless-stopped
+ command: /runBlazegraph.sh
+ volumes:
+ - query-service-data:/wdqs/data
+ networks:
+ default:
+ aliases:
+ - wdqs.svc
+ environment:
+ - WIKIBASE_HOST=${WIKIBASE_HOST}
+ - WDQS_HOST=wdqs.svc
+ - WDQS_PORT=9999
+ expose:
+ - 9999
+
+ wdqs-proxy:
+ image: "${WDQS_PROXY_IMAGE_NAME}"
+ restart: unless-stopped
+ environment:
+ - PROXY_PASS_HOST=wdqs.svc:9999
+ depends_on:
+ - wdqs
+ networks:
+ default:
+ aliases:
+ - wdqs-proxy.svc
+
+ wdqs-updater:
+ image: "${WDQS_IMAGE_NAME}"
+ restart: unless-stopped
+ command: /runUpdate.sh
+ depends_on:
+ - wdqs
+ - wikibase
+ networks:
+ default:
+ aliases:
+ - wdqs-updater.svc
+ environment:
+ - WIKIBASE_HOST=${WIKIBASE_HOST}
+ - WDQS_HOST=wdqs.svc
+ - WDQS_PORT=9999
+ # CONFIG - WIKIBASE_SCHEME can be set to 'https' if the updater should expect https concept uris
+
+volumes:
+ LocalSettings:
+ query-service-data:
+ mediawiki-mysql-data:
diff --git a/test/docker-compose/docker-compose.yml b/test/docker-compose/docker-compose.yml
new file mode 100644
index 00000000..c861701c
--- /dev/null
+++ b/test/docker-compose/docker-compose.yml
@@ -0,0 +1,84 @@
+# Example Wikibase docker-compose setup
+version: '3.4'
+
+x-common-variables: &wikibase_variables
+ DB_SERVER: mysql.svc:3306
+ MW_ADMIN_NAME: ${MW_ADMIN_NAME}
+ MW_ADMIN_PASS: ${MW_ADMIN_PASS}
+ MW_ADMIN_EMAIL: ${MW_ADMIN_EMAIL}
+ MW_WG_SECRET_KEY: ${MW_SECRET_KEY}
+ # Disable jobs running after requests when wikibase_jobrunner is defined
+ MW_WG_JOB_RUN_RATE: 0
+ DB_USER: ${DB_USER}
+ DB_PASS: ${DB_PASS}
+ DB_NAME: ${DB_NAME}
+ WIKIBASE_HOST: ${WIKIBASE_HOST}
+
+services:
+ wikibase:
+ image: "${WIKIBASE_BUNDLE_IMAGE_NAME}"
+ links:
+ - mysql
+ depends_on:
+ - mysql
+ restart: unless-stopped
+ ports:
+ - "${WIKIBASE_PORT}:80"
+ volumes:
+ ## This shares the configuration with jobrunner
+ - shared:/var/www/html/:rw
+ ## Uncomment this next line to mount your own LocalSettings.php file
+ ## also uncomment the same line in the wikibase_jobrunner service!
+ #- ./LocalSettings.php:/var/www/html/LocalSettings.d/LocalSettings.override.php
+ networks:
+ default:
+ aliases:
+ - ${WIKIBASE_HOST}
+ - wikibase-docker.svc
+ environment:
+ <<: *wikibase_variables
+ WIKIBASE_PINGBACK:
+ MW_WG_ENABLE_UPLOADS:
+
+ wikibase_jobrunner:
+ image: "${WIKIBASE_BUNDLE_IMAGE_NAME}"
+ entrypoint: /bin/bash
+ command: /jobrunner-entrypoint.sh
+ links:
+ - mysql
+ depends_on:
+ - mysql
+ restart: always
+ volumes:
+ - shared:/shared/:ro
+ - ./jobrunner-entrypoint.sh:/jobrunner-entrypoint.sh
+ ## Uncomment this next line to mount your own LocalSettings.php file
+ ## also uncomment the same line in the wikibase service
+ #- ./LocalSettings.php:/var/www/html/LocalSettings.d/LocalSettings.override.php
+ networks:
+ default:
+ aliases:
+ - wikibase-jobrunner.svc
+ environment:
+ <<: *wikibase_variables
+ MAX_JOBS: ${MAX_JOBS}
+
+ mysql:
+ image: "${MYSQL_IMAGE_NAME}"
+ restart: unless-stopped
+ volumes:
+ - mediawiki-mysql-data:/var/lib/mysql
+ environment:
+ # CONFIG - Change the default values below (should match values passed to wikibase)
+ MYSQL_DATABASE: ${DB_NAME}
+ MYSQL_USER: ${DB_USER}
+ MYSQL_PASSWORD: ${DB_PASS}
+ MYSQL_RANDOM_ROOT_PASSWORD: 'yes'
+ networks:
+ default:
+ aliases:
+ - mysql.svc
+
+volumes:
+ shared:
+ mediawiki-mysql-data:
diff --git a/test/docker-compose/jobrunner-entrypoint.sh b/test/docker-compose/jobrunner-entrypoint.sh
new file mode 100644
index 00000000..4a67c0b5
--- /dev/null
+++ b/test/docker-compose/jobrunner-entrypoint.sh
@@ -0,0 +1,27 @@
+#!/bin/bash
+
+# Originally inspired by Brennen Bearnes jobrunner entrypoint
+# https://gerrit.wikimedia.org/r/plugins/gitiles/releng/dev-images/+/refs/heads/master/common/jobrunner/entrypoint.sh
+
+# Wait for the db to come up
+/wait-for-it.sh "$DB_SERVER" -t 300
+# Sometimes it appears to come up and then go back down meaning MW install fails
+# So wait for a second and double check!
+sleep 1
+/wait-for-it.sh "$DB_SERVER" -t 300
+
+kill_runner() {
+ kill "$PID" 2> /dev/null
+}
+trap kill_runner SIGTERM
+
+while true; do
+ if [ -e /shared/LocalSettings.php ]; then
+ php maintenance/runJobs.php --wait --maxjobs="$MAX_JOBS" --conf /shared/LocalSettings.php &
+ PID=$!
+ wait "$PID"
+ else
+ echo "LocalSettings.php not shared yet - waiting for 10 seconds."
+ sleep 10
+ fi
+done
\ No newline at end of file
diff --git a/test/test_all.py b/test/test_all.py
deleted file mode 100644
index e9d2c455..00000000
--- a/test/test_all.py
+++ /dev/null
@@ -1,228 +0,0 @@
-import copy
-import unittest
-
-from wikibaseintegrator import WikibaseIntegrator, datatypes, wbi_fastrun
-from wikibaseintegrator.datatypes import BaseDataType, Item
-from wikibaseintegrator.entities import ItemEntity
-from wikibaseintegrator.wbi_config import config as wbi_config
-from wikibaseintegrator.wbi_enums import ActionIfExists, WikibaseDatatype
-from wikibaseintegrator.wbi_fastrun import get_fastrun_container
-
-wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_all.py)'
-
-wbi = WikibaseIntegrator()
-
-
-class TestDataType(unittest.TestCase):
- def test_quantity(self):
- dt = datatypes.Quantity(amount='34.5', prop_nr='P43')
-
- dt_json = dt.get_json()
-
- assert dt_json['mainsnak']['datatype'] == WikibaseDatatype.QUANTITY.value
-
- value = dt_json['mainsnak']['datavalue']
-
- assert value['value']['amount'] == '+34.5'
- assert value['value']['unit'] == '1'
-
- dt2 = datatypes.Quantity(amount='34.5', prop_nr='P43', upper_bound='35.3', lower_bound='33.7', unit="Q11573")
-
- value = dt2.get_json()['mainsnak']['datavalue']
-
- assert value['value']['amount'] == '+34.5'
- assert value['value']['unit'] == 'http://www.wikidata.org/entity/Q11573'
- assert value['value']['upperBound'] == '+35.3'
- assert value['value']['lowerBound'] == '+33.7'
-
- def test_geoshape(self):
- dt = datatypes.GeoShape(value='Data:Inner_West_Light_Rail_stops.map', prop_nr='P43')
-
- dt_json = dt.get_json()
-
- assert dt_json['mainsnak']['datatype'] == WikibaseDatatype.GEOSHAPE.value
-
- value = dt_json['mainsnak']['datavalue']
-
- assert value['value'] == 'Data:Inner_West_Light_Rail_stops.map'
-
- assert value['type'] == 'string'
-
-
-class TestFastRun(unittest.TestCase):
- """
- some basic tests for fastrun mode
- """
-
- def test_fastrun(self):
- statements = [
- datatypes.ExternalID(value='P40095', prop_nr='P352'),
- datatypes.ExternalID(value='YER158C', prop_nr='P705')
- ]
-
- frc = wbi_fastrun.FastRunContainer(base_filter=[BaseDataType(prop_nr='P352'), datatypes.Item(prop_nr='P703', value='Q27510868')], base_data_type=datatypes.BaseDataType)
-
- fastrun_result = frc.write_required(data=statements)
-
- if fastrun_result:
- message = 'fastrun failed'
- else:
- message = 'successful fastrun'
-
- # here, fastrun should succeed, if not, test failed
- if fastrun_result:
- raise ValueError
-
- def test_fastrun_label(self):
- # tests fastrun label, description and aliases, and label in another language
- frc = get_fastrun_container(base_filter=[datatypes.ExternalID(value='/m/02j71', prop_nr='P646')])
- item = WikibaseIntegrator().item.get('Q2')
-
- assert item.labels.get(language='en') == "Earth"
- descr = item.descriptions.get(language='en')
- assert len(descr) > 3
- assert "Planet Earth" in item.aliases.get()
-
- assert list(frc.get_language_data("Q2", 'en', 'label'))[0] == item.labels.get(language='en')
- assert frc.check_language_data("Q2", ['not the Earth'], 'en', 'label')
- assert "Planet Earth" in item.aliases.get()
- assert "planet" in item.descriptions.get()
-
- assert item.labels.get('es') == "Tierra"
-
- item.descriptions.set(value=descr)
- item.descriptions.set(value="fghjkl")
- assert item.get_json()['descriptions']['en'] == {'language': 'en', 'value': 'fghjkl'}
- item.labels.set(value="Earth")
- item.labels.set(value="xfgfdsg")
- assert item.get_json()['labels']['en'] == {'language': 'en', 'value': 'xfgfdsg'}
- item.aliases.set(values=["fake alias"], action_if_exists=ActionIfExists.APPEND_OR_REPLACE)
- assert {'language': 'en', 'value': 'fake alias'} in item.get_json()['aliases']['en']
-
- # something that's empty (for now.., can change, so this just makes sure no exception is thrown)
- frc.check_language_data("Q2", ['Ewiase'], 'ak', 'label')
- frc.check_language_data("Q2", ['not Ewiase'], 'ak', 'label')
- frc.check_language_data("Q2", [''], 'ak', 'description')
- frc.check_language_data("Q2", [], 'ak', 'aliases')
- frc.check_language_data("Q2", ['sdf', 'sdd'], 'ak', 'aliases')
-
- item.labels.get(language="ak")
- item.descriptions.get(language='ak')
- item.aliases.get(language="ak")
- item.labels.set(value="label", language="ak")
- item.descriptions.set(value="d", language="ak")
- item.aliases.set(values=["a"], language="ak", action_if_exists=ActionIfExists.APPEND_OR_REPLACE)
-
-
-def test_sitelinks():
- item = wbi.item.get('Q622901')
- item.claims.add(datatypes.Item(value='Q12136', prop_nr='P31'))
- assert item.sitelinks.get('enwiki') is not None
- item.sitelinks.set(site="enwiki", title="something")
- assert item.sitelinks.get('enwiki').title == "something"
- assert item.sitelinks.get('enwiki') is not None
-
-
-def test_nositelinks():
- # this item doesn't and probably won't ever have any sitelinks (but who knows?? maybe one day..)
- item = wbi.item.get('Q27869338')
- item.claims.add(datatypes.Item(value='Q5', prop_nr='P31'))
- assert item.sitelinks.get('enwiki') is None
- item.sitelinks.set(site="enwiki", title="something")
- assert item.sitelinks.get('enwiki').title == "something"
- assert item.sitelinks.get('enwiki') is not None
-
-
-####
-# tests for statement equality, with and without refs
-####
-def test_ref_equals():
- # statements are identical
- oldref = [datatypes.ExternalID(value='P58742', prop_nr='P352'),
- datatypes.Item(value='Q24784025', prop_nr='P527'),
- datatypes.Time(time='+2001-12-31T00:00:00Z', prop_nr='P813')]
- olditem = datatypes.Item(value='Q123', prop_nr='P123', references=[oldref])
- newitem = copy.deepcopy(olditem)
-
- assert olditem.equals(newitem, include_ref=False)
- assert olditem.equals(newitem, include_ref=True)
-
- # dates are a month apart
- newitem = copy.deepcopy(olditem)
- newitem.references.remove(datatypes.Time(time='+2001-12-31T00:00:00Z', prop_nr='P813'))
- newitem.references.add(datatypes.Time(time='+2002-01-31T00:00:00Z', prop_nr='P813'))
- assert olditem.equals(newitem, include_ref=False)
- assert not olditem.equals(newitem, include_ref=True)
-
- # multiple refs
- newitem = copy.deepcopy(olditem)
- newitem.references.add(datatypes.ExternalID(value='99999', prop_nr='P352'))
- assert olditem.equals(newitem, include_ref=False)
- assert not olditem.equals(newitem, include_ref=True)
- olditem.references.add(datatypes.ExternalID(value='99999', prop_nr='P352'))
- assert olditem.equals(newitem, include_ref=True)
-
-
-def test_equal_qualifiers():
- from wikibaseintegrator.models import Qualifiers
-
- claim1 = Item(prop_nr='P1')
- claim1.qualifiers.set([Item(prop_nr='P2', value='Q1'), Item(prop_nr='P2', value='Q2')])
- claim2 = Item(prop_nr='P4')
- claim2.qualifiers.set([Item(prop_nr='P2', value='Q1')])
- claim3 = Item(prop_nr='P4')
- claim3.qualifiers.set([Item(prop_nr='P2', value='Q1'), Item(prop_nr='P2', value='Q2')])
- claim4 = Item(prop_nr='P4')
- claim4.qualifiers.set([Item(prop_nr='P2', value='Q1'), Item(prop_nr='P2', value='Q3')])
- claim5 = Item(prop_nr='P4')
- qualifiers = Qualifiers()
- qualifiers.set([Item(prop_nr='P2', value='Q1'), Item(prop_nr='P2', value='Q2')])
- claim5.qualifiers.set(qualifiers)
-
- assert claim1.has_equal_qualifiers(claim2) is False
- assert claim1.has_equal_qualifiers(claim3) is True
- assert claim1.has_equal_qualifiers(claim4) is False
- assert claim1.has_equal_qualifiers(claim5) is True
-
-
-def test_mediainfo():
- mediainfo_item_by_title = wbi.mediainfo.get_by_title(titles='File:2018-07-05-budapest-buda-hill.jpg', mediawiki_api_url='https://commons.wikimedia.org/w/api.php')
- assert mediainfo_item_by_title.id == 'M75908279'
-
- mediainfo_item_by_id = wbi.mediainfo.get(entity_id='M75908279', mediawiki_api_url='https://commons.wikimedia.org/w/api.php')
- assert mediainfo_item_by_id.id == 'M75908279'
-
-
-def test_entity_in_basedatatype():
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'commonsMedia']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'entity-schema']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'external-id']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'wikibase-form']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'geo-shape']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'globe-coordinate']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'wikibase-item']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'wikibase-lexeme']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'math']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'monolingualtext']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'musical-notation']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'wikibase-property']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'quantity']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'wikibase-sense']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'string']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'tabular-data']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'time']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'url']) == 1
-
- # Extra datatypes
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'edtf']) == 1
- assert len([x for x in BaseDataType.subclasses if x.DTYPE == 'localMedia']) == 1
-
-
-def test_wikibaseintegrator():
- nwbi = WikibaseIntegrator(is_bot=False)
- assert nwbi.item.api.is_bot is False
- assert ItemEntity(api=nwbi, is_bot=True).api.is_bot is True
- assert ItemEntity(api=nwbi).api.is_bot is False
- assert ItemEntity().api.is_bot is False
- assert nwbi.item.get('Q582').api.is_bot is False
- assert ItemEntity(api=nwbi, is_bot=True).get('Q582').api.is_bot is True
diff --git a/test/test_entity_item.py b/test/test_entity_item.py
index 1c510c6a..c4e6b5bb 100644
--- a/test/test_entity_item.py
+++ b/test/test_entity_item.py
@@ -1,122 +1,128 @@
-import os
-import unittest
+import logging
from copy import deepcopy
import pytest
import requests
-from wikibaseintegrator import WikibaseIntegrator
+from wikibaseintegrator import WikibaseIntegrator, wbi_login
from wikibaseintegrator.datatypes import BaseDataType, Item, MonolingualText, String
from wikibaseintegrator.wbi_config import config as wbi_config
from wikibaseintegrator.wbi_exceptions import NonExistentEntityError
wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_entity_item.py)'
+wbi_config['MEDIAWIKI_API_URL'] = 'http://localhost/w/api.php'
+wbi_config['SPARQL_ENDPOINT_URL'] = 'http://localhost:8834/proxy/wdqs/bigdata/namespace/wdq/sparql'
+wbi_config['WIKIBASE_URL'] = 'http://wikibase.svc'
-wbi = WikibaseIntegrator()
-
-
-class TestEntityItem(unittest.TestCase):
-
- def test_get(self):
- # Test with complete id
- assert wbi.item.get('Q582').id == 'Q582'
- # Test with numeric id as string
- assert wbi.item.get('582').id == 'Q582'
- # Test with numeric id as int
- assert wbi.item.get(582).id == 'Q582'
-
- # Test with invalid id
- with self.assertRaises(ValueError):
- wbi.item.get('L5')
-
- # Test with zero id
- with self.assertRaises(ValueError):
- wbi.item.get(0)
-
- # Test with negative id
- with self.assertRaises(ValueError):
- wbi.item.get(-1)
-
- # Test with negative id
- with self.assertRaises(NonExistentEntityError):
- wbi.item.get("Q99999999999999")
-
- def test_get_json(self):
- assert wbi.item.get('Q582').get_json()['labels']['fr']['value'] == 'Villeurbanne'
-
- def test_write(self):
- with self.assertRaises(requests.exceptions.JSONDecodeError):
- wbi.item.get('Q582').write(allow_anonymous=True, mediawiki_api_url=os.getenv("HTTPSTATUS_SERVICE", "https://httpbin.org") + "/status/200")
-
- def test_write_not_required(self):
- assert not wbi.item.get('Q582').write_required(base_filter=[BaseDataType(prop_nr='P1791')])
-
- def test_write_required(self):
- item = wbi.item.get('Q582')
- item.claims.add(Item(prop_nr='P1791', value='Q42'))
- assert item.write_required([BaseDataType(prop_nr='P1791')])
-
- def test_write_not_required_ref(self):
- assert not wbi.item.get('Q582').write_required(base_filter=[BaseDataType(prop_nr='P2581')], use_refs=True)
-
- def test_write_required_ref(self):
- item = wbi.item.get('Q582')
- item.claims.get('P2581')[0].references.references.pop()
- assert item.write_required(base_filter=[BaseDataType(prop_nr='P2581')], use_refs=True)
-
- def test_long_item_id(self):
- assert wbi.item.get('Item:Q582').id == 'Q582'
-
- def test_entity_url(self):
- assert wbi.item.new(id='Q582').get_entity_url() == 'http://www.wikidata.org/entity/Q582'
- assert wbi.item.new(id='582').get_entity_url() == 'http://www.wikidata.org/entity/Q582'
- assert wbi.item.new(id=582).get_entity_url() == 'http://www.wikidata.org/entity/Q582'
-
- def test_entity_qualifers_remove(self):
- item_original = wbi.item.get('Q582')
-
- # clear()
- item = deepcopy(item_original)
- assert len(item.claims.get('P443')[0].qualifiers.clear('P666')) >= 1
- item = deepcopy(item_original)
- assert len(item.claims.get('P443')[0].qualifiers.clear('P407')) == 0
- item = deepcopy(item_original)
- assert len(item.claims.get('P443')[0].qualifiers.clear()) == 0
-
- # remove()
- item = deepcopy(item_original)
- from pprint import pprint
- pprint(item.claims.get('P443')[0].qualifiers)
- assert len(item.claims.get('P443')[0].qualifiers.remove(Item(prop_nr='P407', value='Q150'))) == 0
-
- # common
- item = deepcopy(item_original)
- assert len(item.claims.get('P443')) >= 1
- assert len(item.claims.get('P443')[0].qualifiers) >= 1
-
- def test_new_lines(self):
- item = wbi.item.new()
-
- with pytest.raises(ValueError):
- item.claims.add(String(prop_nr=123, value="Multi\r\nline"))
- with pytest.raises(ValueError):
- item.claims.add(String(prop_nr=123, value="Multi\rline"))
- with pytest.raises(ValueError):
- item.claims.add(String(prop_nr=123, value="Multi\nline"))
-
- with pytest.raises(ValueError):
- item.claims.add(MonolingualText(prop_nr=123, text="Multi\r\nline"))
- item.claims.add(MonolingualText(prop_nr=123, text="Multi\rline"))
- item.claims.add(MonolingualText(prop_nr=123, text="Multi\nline"))
-
- def test_get_limited_props(self):
- item = wbi.item.get('Q582', props=['labels'])
- assert item.labels.get('fr').value == 'Villeurbanne'
- assert len(item.claims) == 0
- assert len(item.sitelinks) == 0
- assert len(item.aliases) == 0
- assert len(item.descriptions) == 0
-
- item = wbi.item.get('Q582', props=['aliases'])
- assert len(item.aliases) > 0
- assert len(item.labels) == 0
+wbi = WikibaseIntegrator(login=wbi_login.Login(user='admin', password='change-this-password'))
+logging.basicConfig(level=logging.DEBUG)
+
+
+# NOTE: pytest marks have no effect on fixtures; the fixture runs on first use
+@pytest.fixture
+def test_item_creation():
+ return wbi.item.new().write().id
+
+
+def test_get(test_item_creation):
+ entity_id = test_item_creation
+    # Test with complete id
+    assert wbi.item.get(entity_id).id == entity_id
+    # Test with numeric id as string
+    assert wbi.item.get(entity_id[1:]).id == entity_id
+    # Test with numeric id as int
+    assert wbi.item.get(int(entity_id[1:])).id == entity_id
+
+ # Test with invalid id
+    with pytest.raises(ValueError):
+ wbi.item.get('L5')
+
+ # Test with zero id
+    with pytest.raises(ValueError):
+ wbi.item.get(0)
+
+ # Test with negative id
+    with pytest.raises(ValueError):
+ wbi.item.get(-1)
+
+ # Test with negative id
+    with pytest.raises(NonExistentEntityError):
+ wbi.item.get("Q99999999999999")
+
+
+def test_get_json():
+ assert wbi.item.get('Q582').get_json()['labels']['fr']['value'] == 'Villeurbanne'
+
+
+def test_write():
+    with pytest.raises(requests.exceptions.JSONDecodeError):
+ wbi.item.get('Q582').write(allow_anonymous=True, mediawiki_api_url='https://httpstat.us/200')
+
+
+def test_write_not_required():
+ assert not wbi.item.get('Q582').write_required(base_filter=[BaseDataType(prop_nr='P1791')])
+
+
+def test_write_required():
+ item = wbi.item.get('Q582')
+ item.claims.add(Item(prop_nr='P1791', value='Q42'))
+ assert item.write_required([BaseDataType(prop_nr='P1791')])
+
+
+def test_write_not_required_ref():
+ assert not wbi.item.get('Q582').write_required(base_filter=[BaseDataType(prop_nr='P2581')], use_refs=True)
+
+
+def test_write_required_ref():
+ item = wbi.item.get('Q582')
+ item.claims.get('P2581')[0].references.references.pop()
+ assert item.write_required(base_filter=[BaseDataType(prop_nr='P2581')], use_refs=True)
+
+
+def test_long_item_id():
+ assert wbi.item.get('Item:Q582').id == 'Q582'
+
+
+def test_entity_url():
+ assert wbi.item.new(id='Q582').get_entity_url() == 'http://www.wikidata.org/entity/Q582'
+ assert wbi.item.new(id='582').get_entity_url() == 'http://www.wikidata.org/entity/Q582'
+ assert wbi.item.new(id=582).get_entity_url() == 'http://www.wikidata.org/entity/Q582'
+
+
+def test_entity_qualifers_remove():
+ item_original = wbi.item.get('Q582')
+
+ # clear()
+ item = deepcopy(item_original)
+ assert len(item.claims.get('P452')[0].qualifiers.clear('P666')) >= 1
+ item = deepcopy(item_original)
+ assert len(item.claims.get('P452')[0].qualifiers.clear('P1013')) == 0
+ item = deepcopy(item_original)
+ assert len(item.claims.get('P452')[0].qualifiers.clear()) == 0
+
+ # remove()
+ item = deepcopy(item_original)
+ from pprint import pprint
+ pprint(item.claims.get('P452')[0].qualifiers)
+ assert len(item.claims.get('P452')[0].qualifiers.remove(Item(prop_nr='P1013', value='Q112111570'))) == 0
+
+ # common
+ item = deepcopy(item_original)
+ assert len(item.claims.get('P452')) >= 1
+ assert len(item.claims.get('P452')[0].qualifiers) >= 1
+
+
+def test_new_lines():
+ item = wbi.item.new()
+
+ with pytest.raises(ValueError):
+ item.claims.add(String(prop_nr=123, value="Multi\r\nline"))
+ with pytest.raises(ValueError):
+ item.claims.add(String(prop_nr=123, value="Multi\rline"))
+ with pytest.raises(ValueError):
+ item.claims.add(String(prop_nr=123, value="Multi\nline"))
+
+ with pytest.raises(ValueError):
+ item.claims.add(MonolingualText(prop_nr=123, text="Multi\r\nline"))
+ item.claims.add(MonolingualText(prop_nr=123, text="Multi\rline"))
+ item.claims.add(MonolingualText(prop_nr=123, text="Multi\nline"))
diff --git a/test/test_entity_lexeme.py b/test/test_entity_lexeme.py
deleted file mode 100644
index 6239ee6e..00000000
--- a/test/test_entity_lexeme.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import unittest
-
-from wikibaseintegrator import WikibaseIntegrator, datatypes
-from wikibaseintegrator.models import Form
-from wikibaseintegrator.wbi_config import config as wbi_config
-
-wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_entity_lexeme.py)'
-
-wbi = WikibaseIntegrator()
-
-
-class TestEntityLexeme(unittest.TestCase):
-
- def test_get(self):
- # Test with complete id
- assert wbi.lexeme.get('L5').id == 'L5'
- # Test with numeric id as string
- assert wbi.lexeme.get('5').id == 'L5'
- # Test with numeric id as int
- assert wbi.lexeme.get(5).id == 'L5'
-
- # Test with invalid id
- with self.assertRaises(ValueError):
- wbi.lexeme.get('Q5')
-
- # Test with zero id
- with self.assertRaises(ValueError):
- wbi.lexeme.get(0)
-
- # Test with negative id
- with self.assertRaises(ValueError):
- wbi.lexeme.get(-1)
-
- def test_get_json(self):
- assert wbi.lexeme.get('L5').get_json()['forms'][0]['representations']['es']['value'] == 'pinos'
-
- def test_long_item_id(self):
- assert wbi.lexeme.get('Lexeme:L582').id == 'L582'
-
- def test_entity_url(self):
- assert wbi.lexeme.new(id='L582').get_entity_url() == 'http://www.wikidata.org/entity/L582'
- assert wbi.lexeme.new(id='582').get_entity_url() == 'http://www.wikidata.org/entity/L582'
- assert wbi.lexeme.new(id=582).get_entity_url() == 'http://www.wikidata.org/entity/L582'
-
- # Test if the language is correctly formatted (T338255)
- def test_wrong_language(self):
- assert wbi.lexeme.new(language='http://www.wikidata.org/entity/Q397').language == 'Q397'
- assert wbi.lexeme.new(language='wd:Q397').language == 'Q397'
- assert wbi.lexeme.new(language='Q397').language == 'Q397'
- assert wbi.lexeme.new(language='397').language == 'Q397'
- assert wbi.lexeme.new(language=397).language == 'Q397'
-
- def test_get_lexeme_id(self):
- assert datatypes.Form(value='L123-F123', prop_nr='P16').get_lexeme_id() == 'L123'
- assert datatypes.Sense(value='L123-S123', prop_nr='P16').get_lexeme_id() == 'L123'
-
- def test_get_forms(self):
- lexeme = wbi.lexeme.new()
-
- form = Form(form_id='L5-F4')
- form.representations.set(language='en', value='English form representation')
- form.representations.set(language='fr', value='French form representation')
- claim = datatypes.String(prop_nr='P828', value="Create a string claim for form")
- form.claims.add(claim)
- lexeme.forms.add(form)
-
- form = Form(form_id='L5-F5')
- form.representations.set(language='en', value='English form representation')
- form.representations.set(language='fr', value='French form representation')
- claim = datatypes.String(prop_nr='P828', value="Create a string claim for form")
- form.claims.add(claim)
- lexeme.forms.add(form)
-
- form = Form()
- form.representations.set(language='en', value='English form representation')
- form.representations.set(language='fr', value='French form representation')
- claim = datatypes.String(prop_nr='P828', value="Create a string claim for form")
- form.claims.add(claim)
- lexeme.forms.add(form)
-
- form = Form()
- form.representations.set(language='en', value='English form representation')
- form.representations.set(language='fr', value='French form representation')
- claim = datatypes.String(prop_nr='P828', value="Create a string claim for form")
- form.claims.add(claim)
- lexeme.forms.add(form)
-
- assert not lexeme.forms.get('L5-F3')
- assert lexeme.forms.get('L5-F4') and lexeme.forms.get('L5-F5')
- assert len(lexeme.forms) == 4
diff --git a/test/test_entity_mediainfo.py b/test/test_entity_mediainfo.py
deleted file mode 100644
index 52bbe9c4..00000000
--- a/test/test_entity_mediainfo.py
+++ /dev/null
@@ -1,72 +0,0 @@
-import unittest
-
-from wikibaseintegrator import WikibaseIntegrator
-from wikibaseintegrator.wbi_config import config as wbi_config
-
-wbi = WikibaseIntegrator()
-
-
-class TestEntityMediaInfo(unittest.TestCase):
-
- def setUp(self):
- self._user_agent_exists = 'USER_AGENT' in wbi_config
- self._old_user_agent = wbi_config.get('USER_AGENT')
- self._wikibase_url_exists = 'WIKIBASE_URL' in wbi_config
- self._old_wikibase_url = wbi_config.get('WIKIBASE_URL')
- self._mediawiki_api_url_exists = 'MEDIAWIKI_API_URL' in wbi_config
- self._old_mediawiki_api_url = wbi_config.get('MEDIAWIKI_API_URL')
- wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_entity_mediainfo.py)'
- wbi_config['WIKIBASE_URL'] = 'https://commons.wikimedia.org'
- wbi_config['MEDIAWIKI_API_URL'] = 'https://commons.wikimedia.org/w/api.php'
-
- def tearDown(self):
- if self._user_agent_exists:
- wbi_config['USER_AGENT'] = self._old_user_agent
- else:
- wbi_config.pop('USER_AGENT', None)
- if self._wikibase_url_exists:
- wbi_config['WIKIBASE_URL'] = self._old_wikibase_url
- else:
- wbi_config.pop('WIKIBASE_URL', None)
- if self._mediawiki_api_url_exists:
- wbi_config['MEDIAWIKI_API_URL'] = self._old_mediawiki_api_url
- else:
- wbi_config.pop('MEDIAWIKI_API_URL', None)
-
- def test_get(self):
- # Test with complete id
- assert wbi.mediainfo.get('M75908279').id == 'M75908279'
- # Test with numeric id as string
- assert wbi.mediainfo.get('75908279').id == 'M75908279'
- # Test with numeric id as int
- assert wbi.mediainfo.get(75908279).id == 'M75908279'
-
- # Test with invalid id
- with self.assertRaises(ValueError):
- wbi.mediainfo.get('L5')
-
- # Test with zero id
- with self.assertRaises(ValueError):
- wbi.mediainfo.get(0)
-
- # Test with negative id
- with self.assertRaises(ValueError):
- wbi.mediainfo.get(-1)
-
- def test_get_json(self):
- assert wbi.mediainfo.get('M75908279').get_json()
-
- def test_entity_url(self):
- assert wbi.mediainfo.new(id='M75908279').get_entity_url() == 'https://commons.wikimedia.org/entity/M75908279'
- assert wbi.mediainfo.new(id='75908279').get_entity_url() == 'https://commons.wikimedia.org/entity/M75908279'
- assert wbi.mediainfo.new(id=75908279).get_entity_url() == 'https://commons.wikimedia.org/entity/M75908279'
-
- # Test if we can read the claims/statements of the entity
- def test_entity_claims(self):
- media = wbi.mediainfo.get('M75908279')
- assert media.claims
-
- # Test if we can have the statements field in the json
- def test_get_statements(self):
- media = wbi.mediainfo.get('M75908279')
- assert media.get_json()['statements']
diff --git a/test/test_entity_property.py b/test/test_entity_property.py
deleted file mode 100644
index 049ed965..00000000
--- a/test/test_entity_property.py
+++ /dev/null
@@ -1,45 +0,0 @@
-import unittest
-
-from wikibaseintegrator import WikibaseIntegrator
-from wikibaseintegrator.wbi_config import config as wbi_config
-
-wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_entity_property.py)'
-
-wbi = WikibaseIntegrator()
-
-
-class TestEntityProperty(unittest.TestCase):
-
- def test_get(self):
- # Test with complete id
- assert wbi.property.get('P50').id == 'P50'
- # Test with numeric id as string
- assert wbi.property.get('50').id == 'P50'
- # Test with numeric id as int
- assert wbi.property.get(50).id == 'P50'
-
- # Test with invalid id
- with self.assertRaises(ValueError):
- wbi.property.get('L5')
-
- # Test with zero id
- with self.assertRaises(ValueError):
- wbi.property.get(0)
-
- # Test with negative id
- with self.assertRaises(ValueError):
- wbi.property.get(-1)
-
- def test_get_json(self):
- assert wbi.property.get('P50', mediawiki_api_url='https://commons.wikimedia.org/w/api.php').get_json()['labels']['fr']['value'] == 'auteur ou autrice'
-
- def test_create_property(self):
- wbi.property.new(datatype='wikibase-item')
-
- def test_long_item_id(self):
- assert wbi.property.get('Property:P582').id == 'P582'
-
- def test_entity_url(self):
- assert wbi.property.new(id='P582').get_entity_url() == 'http://www.wikidata.org/entity/P582'
- assert wbi.property.new(id='582').get_entity_url() == 'http://www.wikidata.org/entity/P582'
- assert wbi.property.new(id=582).get_entity_url() == 'http://www.wikidata.org/entity/P582'
diff --git a/test/test_wbi_backoff.py b/test/test_wbi_backoff.py
deleted file mode 100644
index d1929e84..00000000
--- a/test/test_wbi_backoff.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import os
-import unittest
-
-import requests
-import ujson
-
-from wikibaseintegrator import wbi_login
-from wikibaseintegrator.wbi_backoff import wbi_backoff
-from wikibaseintegrator.wbi_config import config
-
-
-class TestMethods(unittest.TestCase):
- def test_all(self):
- config['BACKOFF_MAX_TRIES'] = 2
- config['BACKOFF_MAX_VALUE'] = 3
- with self.assertRaises(requests.RequestException):
- bad_http_code()
- with self.assertRaises(requests.RequestException):
- bad_login()
- with self.assertRaises(requests.RequestException):
- bad_request()
-
- assert good_http_code() == 200
-
- with self.assertRaises(ValueError):
- bad_json()
-
-
-# @backoff.on_exception(backoff.expo, (requests.exceptions.Timeout, requests.exceptions.ConnectionError, requests.HTTPError, JSONDecodeError), max_time=60)
-
-@wbi_backoff()
-def bad_http_code():
- r = requests.get(os.getenv("HTTPSTATUS_SERVICE", "https://httpbin.org") + "/status/400")
- r.raise_for_status()
-
-
-@wbi_backoff()
-def good_http_code():
- r = requests.get(os.getenv("HTTPSTATUS_SERVICE", "https://httpbin.org") + "/status/200")
- r.raise_for_status()
- print(r.status_code)
- return r.status_code
-
-
-@wbi_backoff()
-def bad_json():
- ujson.loads("I failed :(")
-
-
-@wbi_backoff()
-def bad_request():
- requests.get("https://www.fakeurlgsdkjhjgfseg.com")
-
-
-def bad_login():
- wbi_login.Clientlogin(user='name', password='pass', mediawiki_api_url="www.wikidataaaaaaaaa.org")
diff --git a/test/test_wbi_core.py b/test/test_wbi_core.py
deleted file mode 100644
index e9db809f..00000000
--- a/test/test_wbi_core.py
+++ /dev/null
@@ -1,303 +0,0 @@
-import unittest
-from copy import deepcopy
-
-from wikibaseintegrator import WikibaseIntegrator
-from wikibaseintegrator.datatypes import (URL, CommonsMedia, ExternalID, Form, GeoShape, GlobeCoordinate, Item, Lexeme, Math, MonolingualText, MusicalNotation, Property, Quantity,
- Sense, String, TabularData, Time)
-from wikibaseintegrator.datatypes.extra import EDTF, LocalMedia
-from wikibaseintegrator.entities import ItemEntity
-from wikibaseintegrator.models import Descriptions
-from wikibaseintegrator.wbi_config import config as wbi_config
-from wikibaseintegrator.wbi_enums import ActionIfExists, WikibaseRank, WikibaseSnakType, WikibaseTimePrecision
-from wikibaseintegrator.wbi_helpers import generate_entity_instances, search_entities
-
-wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_wbi_core.py)'
-
-wbi = WikibaseIntegrator()
-
-
-class TestWbiCore(unittest.TestCase):
- common_item = wbi.item.new().get('Q2')
-
- def test_item_engine(self):
- ItemEntity(api=wbi)
- wbi.item.new()
- ItemEntity(api=wbi).add_claims(String(value='test', prop_nr='P1'))
- ItemEntity(api=wbi).add_claims([String(value='test', prop_nr='P1')])
- ItemEntity(api=wbi, id='Q2')
- with self.assertRaises(TypeError):
- ItemEntity(api=wbi).add_claims('test')
-
- def test_get(self):
- item = wbi.item.new().get(entity_id='Q2')
-
- assert item.labels.get('en').value == "Earth"
-
- descr = item.descriptions.get('en').value
- assert len(descr) > 3
-
- assert "la Terre" in item.aliases.get('fr')
- assert "planet" in item.descriptions.get('en')
-
- assert item.labels.get('es') == "Tierra"
-
- def test_basedatatype_action_if_exists(self):
- instances = [Item(prop_nr='P31', value='Q1234'), Item(prop_nr='P31', value='Q1234')]
- item_original = wbi.item.get('Q2')
- len_claims_original = len([x.mainsnak.datavalue['value']['id'] for x in item_original.claims.get('P31')])
-
- item = deepcopy(item_original)
- item.add_claims(instances, action_if_exists=ActionIfExists.APPEND_OR_REPLACE)
- claims = [x.mainsnak.datavalue['value']['id'] for x in item.claims.get('P31')]
- # Append claims to item, only one unique added
- assert len(claims) == len_claims_original + 1 and 'Q1234' in claims and claims.count('Q1234') == 1
-
- item = deepcopy(item_original)
- item.add_claims(instances, action_if_exists=ActionIfExists.FORCE_APPEND)
- claims = [x.mainsnak.datavalue['value']['id'] for x in item.claims.get('P31')]
- # Append claims to item, force two to be added
- assert len(claims) == len_claims_original + 2 and 'Q1234' in claims and claims.count('Q1234') == 2
-
- item = deepcopy(item_original)
- item.add_claims(instances, action_if_exists=ActionIfExists.KEEP)
- claims = [x.mainsnak.datavalue['value']['id'] for x in item.claims.get('P31')]
- # Append claims to item, there is already claims, so nothing added
- assert len(claims) == len_claims_original and 'Q1234' not in claims
-
- item = deepcopy(item_original)
- item.add_claims(instances, action_if_exists=ActionIfExists.REPLACE_ALL)
- item.add_claims(instances, action_if_exists=ActionIfExists.REPLACE_ALL) # We add the instances a second time, in case everything is marked as removed.
- claims = [x.mainsnak.datavalue['value']['id'] for x in item.claims.get('P31') if not x.removed]
- removed_claims = [True for x in item.claims.get('P31') if x.removed]
- # Append claims to item, replace already existing claims with new ones, only one if it's the same property number
- assert len(claims) == 1 and 'Q1234' in claims and len(removed_claims) == len_claims_original and True in removed_claims and claims.count('Q1234') == 1
-
- def test_description(self):
- item = wbi.item.get('Q2')
-
- descr = item.descriptions.get('en').value
- assert len(descr) > 3
-
- assert "planet" in item.descriptions.get('en')
-
- # set_description on already existing description
- item.descriptions.set(value=descr)
- assert item.descriptions.get() == descr
- item.descriptions.set(value="lorem")
- assert item.descriptions.get() == "lorem"
- item.descriptions.set(language='es', value="lorem ipsum")
- assert item.descriptions.get('es') == "lorem ipsum"
- item.descriptions.set(language='en', value="lorem ipsum", action_if_exists=ActionIfExists.KEEP)
- assert item.get_json()['descriptions']['en'] == {'language': 'en', 'value': 'lorem'}
- # set_description on empty description
- item.descriptions = Descriptions()
- item.descriptions.set(value='')
- item.descriptions.set(language='en', value="lorem ipsum", action_if_exists=ActionIfExists.KEEP)
- assert item.get_json()['descriptions']['en'] == {'language': 'en', 'value': 'lorem ipsum'}
-
- item.descriptions.set(language='fr', value="lorem", action_if_exists=ActionIfExists.KEEP)
- item.descriptions.set(language='fr', value="lorem ipsum", action_if_exists=ActionIfExists.REPLACE_ALL)
- item.descriptions.set(language='en', value="lorem", action_if_exists=ActionIfExists.KEEP)
- assert item.get_json()['descriptions']['en'] == {'language': 'en', 'value': 'lorem ipsum'}
- assert item.get_json()['descriptions']['fr'] == {'language': 'fr', 'value': 'lorem ipsum'}
-
- # TODO: Test deletion of description?
-
- def test_label(self):
- item = wbi.item.get('Q2')
-
- assert item.labels.get('en') == "Earth"
-
- assert "la Terre" in item.aliases.get('fr')
-
- assert item.labels.get("es") == "Tierra"
-
- item.labels.set(value='Earth')
- item.labels.set(value='xfgfdsg')
- item.labels.set(language='en', value='xfgfdsgtest', action_if_exists=ActionIfExists.KEEP)
- assert item.get_json()['labels']['en'] == {'language': 'en', 'value': 'xfgfdsg'}
- assert item.get_json()['labels']['fr'] == {'language': 'fr', 'value': 'Terre'}
- item.aliases.set(values=["fake alias"], action_if_exists=ActionIfExists.APPEND_OR_REPLACE)
- assert {'language': 'en', 'value': 'fake alias'} in item.get_json()['aliases']['en']
-
- item.labels.set(language='fr', value=None)
- item.labels.set(language='non-exist-key', value=None)
- assert 'remove' in item.get_json()['labels']['fr']
-
- item.labels.set(language='ak')
- item.descriptions.set(language='ak')
- item.aliases.set(language='ak')
- item.labels.set(value='label', language='ak')
- item.descriptions.set(value='d', language='ak')
- item.aliases.set(values=['a'], language='ak', action_if_exists=ActionIfExists.APPEND_OR_REPLACE)
- assert 'a' in item.aliases.get('ak')
- item.aliases.set(values='b', language='ak')
- assert all(i in item.aliases.get('ak') for i in ['a', 'b']) and len(item.aliases.get('ak')) >= 2
- item.aliases.set(values='b', language='ak', action_if_exists=ActionIfExists.REPLACE_ALL)
- assert item.aliases.get('ak') == ['b']
- item.aliases.set(values=['c'], language='ak', action_if_exists=ActionIfExists.REPLACE_ALL)
- assert item.aliases.get('ak') == ['c']
- item.aliases.set(values=['d'], language='ak', action_if_exists=ActionIfExists.KEEP)
- assert 'd' not in item.aliases.get('ak')
- item.aliases.set(language='ak', action_if_exists=ActionIfExists.KEEP)
- assert 'remove' not in item.get_json()['aliases']['ak'][0]
- item.aliases.set(language='ak')
- assert 'remove' in item.get_json()['aliases']['ak'][0]
-
- def test_wd_search(self):
- t = search_entities('rivaroxaban')
- print('Number of results: ', len(t))
- self.assertIsNot(len(t), 0)
-
- def test_entity_generator(self):
- entities = {
- 'Q408883': {
- 'etype': 'item',
- 'ctype': 'ItemEntity'
- }, 'P715': {
- 'etype': 'property',
- 'ctype': 'PropertyEntity'
- }, 'Q18046452': {
- 'etype': 'item',
- 'ctype': 'ItemEntity'
- }, 'L5': {
- 'etype': 'lexeme',
- 'ctype': 'LexemeEntity'
- }
- }
-
- entity_instances = generate_entity_instances(entities=list(entities.keys()))
-
- for qid, entity in entity_instances:
- self.assertIn(qid, entities) # codespell:ignore
- assert entity.ETYPE == entities[qid]['etype']
- assert type(entity).__name__ == entities[qid]['ctype']
-
- entity_instances = generate_entity_instances(entities='Q408883')
-
- for qid, entity in entity_instances:
- assert qid == 'Q408883'
- assert entity.ETYPE == 'item'
- assert type(entity).__name__ == 'ItemEntity'
-
- def test_rank(self):
- t1 = String(value='test1', prop_nr='P1', rank='preferred')
- assert t1.rank == WikibaseRank.PREFERRED
-
- t2 = String(value='test1', prop_nr='P1', rank=WikibaseRank.NORMAL)
- assert t2.rank == WikibaseRank.NORMAL
-
- t2 = String(value='test1', prop_nr='P1', rank=WikibaseRank.DEPRECATED)
- assert t2.get_json()['rank'] == WikibaseRank.DEPRECATED.value
-
- with self.assertRaises(ValueError):
- String(value='test1', prop_nr='P1', rank='invalid_rank')
-
- def test_snaktype(self):
- t1 = String(value='test1', prop_nr='P1')
- t1.mainsnak.snaktype = 'novalue'
- assert t1.mainsnak.snaktype == WikibaseSnakType.NO_VALUE
-
- t2 = String(value='test1', prop_nr='P1')
- t2.mainsnak.snaktype = WikibaseSnakType.UNKNOWN_VALUE
- assert t2.mainsnak.snaktype == WikibaseSnakType.UNKNOWN_VALUE
-
- t3 = String(value='test1', prop_nr='P1')
- t3.mainsnak.snaktype = WikibaseSnakType.KNOWN_VALUE
- assert t3.mainsnak.get_json()['snaktype'] == WikibaseSnakType.KNOWN_VALUE.value
-
- t4 = String(value='test1', prop_nr='P1')
- with self.assertRaises(ValueError):
- t4.mainsnak.snaktype = 'invalid_value'
-
- t5 = String(prop_nr='P1', snaktype=WikibaseSnakType.NO_VALUE)
- assert t5.mainsnak.get_json()['snaktype'] == WikibaseSnakType.NO_VALUE.value
-
- def test_new_item_creation(self):
- data = [
- String(value='test1', prop_nr='P1'),
- String(value='test2', prop_nr='1'),
- String(value='test3', prop_nr=1),
- Math(value='xxx', prop_nr='P2'),
- ExternalID(value='xxx', prop_nr='P3'),
- Item(value='Q123', prop_nr='P4'),
- Item(value='123', prop_nr='P4'),
- Item(value=123, prop_nr='P4'),
- Item(value='Item:Q123', prop_nr='P4'),
- Item(value='http://www.wikidata.org/entity/Q123', prop_nr='P4'),
- Time(time='-0458-01-01T00:00:00Z', before=1, after=2, precision=WikibaseTimePrecision.MILLION_YEARS, timezone=4, prop_nr='P5'),
- Time(time='+458-01-01T00:00:00Z', before=1, after=2, precision=WikibaseTimePrecision.MILLION_YEARS, timezone=4, prop_nr='P5'),
- Time(time='+2021-01-01T00:00:00Z', before=1, after=2, precision=3, timezone=4, prop_nr='P5'),
- Time(time='now', before=1, after=2, precision=WikibaseTimePrecision.MONTH, timezone=4, prop_nr='P5'),
- Time(time='+2021-01-00T00:00:00Z', before=1, after=2, precision=WikibaseTimePrecision.MONTH, timezone=4, prop_nr='P5'),
- Time(time='+2021-00-00T00:00:00Z', before=1, after=2, precision=WikibaseTimePrecision.YEAR, timezone=4, prop_nr='P5'),
- Time(time='+2021-00-00T00:00:00Z', before=1, after=2, precision=WikibaseTimePrecision.DECADE, timezone=4, prop_nr='P5'),
- Time(time='-13700000000-00-00T00:00:00Z', before=0, after=0, precision=WikibaseTimePrecision.HUNDRED_MILLION_YEARS, timezone=0, prop_nr='P585'),
- Time(time="-2450000000-00-00T00:00:00Z", before=0, after=0, precision=WikibaseTimePrecision.TEN_MILLION_YEARS, timezone=0, prop_nr='P585'),
- Time(time="-40000-00-00T00:00:00Z", before=0, after=0, precision=WikibaseTimePrecision.TEN_THOUSAND_YEARS, timezone=0, prop_nr='P585'),
- URL(value="http://www.wikidata.org", prop_nr='P6'),
- URL(value="https://www.wikidata.org", prop_nr='P6'),
- URL(value="ftp://example.com", prop_nr='P6'),
- URL(value="ssh://user@server/project.git", prop_nr='P6'),
- URL(value="svn+ssh://user@server:8888/path", prop_nr='P6'),
- MonolingualText(text='xxx', language='fr', prop_nr='P7'),
- Quantity(amount=-5.04, prop_nr='P8'),
- Quantity(amount=5.06, upper_bound=9.99, lower_bound=-2.22, unit='Q11573', prop_nr='P8'),
- CommonsMedia(value='xxx', prop_nr='P9'),
- GlobeCoordinate(latitude=1.2345, longitude=-1.2345, precision=12, prop_nr='P10'),
- GeoShape(value='Data:xxx.map', prop_nr='P11'),
- Property(value='P123', prop_nr='P12'),
- Property(value='123', prop_nr='P12'),
- Property(value=123, prop_nr='P12'),
- Property(value='Property:P123', prop_nr='P12'),
- Property(value='http://www.wikidata.org/entity/P123', prop_nr='P12'),
- TabularData(value="Data:Taipei+Population.tab", prop_nr='P13'),
- MusicalNotation(value="\relative c' { c d e f | g2 g | a4 a a a | g1 |}", prop_nr='P14'),
- Lexeme(value='L123', prop_nr='P15'),
- Lexeme(value='123', prop_nr='P15'),
- Lexeme(value=123, prop_nr='P15'),
- Lexeme(value='Lexeme:L123', prop_nr='P15'),
- Lexeme(value='http://www.wikidata.org/entity/L123', prop_nr='P15'),
- Form(value='L123-F123', prop_nr='P16'),
- Sense(value='L123-S123', prop_nr='P17')
- ]
-
- for d in data:
- item = wbi.item.new().add_claims([d])
- assert item.get_json()
- item = wbi.item.new().add_claims(d)
- assert item.get_json()
-
- item = wbi.item.new().add_claims(data)
- assert item.get_json()
-
- def test_new_extra_item_creation(self):
- data = [
- EDTF(value='test1', prop_nr='P1'),
- LocalMedia(value='test2', prop_nr='P2')
- ]
-
- for d in data:
- item = wbi.item.new().add_claims([d])
- assert item.get_json()
- item = wbi.item.new().add_claims(d)
- assert item.get_json()
-
- item = wbi.item.new().add_claims(data)
- assert item.get_json()
-
- def test_get_property_list(self):
- self.assertTrue(len(self.common_item.claims))
-
- def test_count_references(self):
- self.assertTrue(len(self.common_item.claims.get('P2067')[0].references))
-
- def test_get_qualifier_properties(self):
- self.assertTrue(len(self.common_item.claims.get(property='P2067')))
-
- def test_claim_reset_id(self):
- item = wbi.item.get('Q582')
- claim = item.claims.get('P31')[0]
- assert claim.id is not None
- claim.reset_id()
- assert claim.id is None
diff --git a/test/test_wbi_exceptions.py b/test/test_wbi_exceptions.py
deleted file mode 100644
index 73c723de..00000000
--- a/test/test_wbi_exceptions.py
+++ /dev/null
@@ -1,164 +0,0 @@
-from unittest import TestCase
-
-from wikibaseintegrator.wbi_exceptions import ModificationFailed, SaveFailed, SearchError
-
-
-class TestWbiExceptions(TestCase):
- @staticmethod
- def test_modification_failed():
- error_dict = {'error': {'*': 'See https://test.wikidata.org/w/api.php for API usage. '
- 'Subscribe to the mediawiki-api-announce mailing list at '
- '<https://lists.wikimedia.org/postorius/lists/mediawiki-api-announce.lists.wikimedia.org/> '
- 'for notice of API deprecations and breaking changes.',
- 'code': 'modification-failed',
- 'info': 'Item [[Q582|Q582]] already has label "MODIFIED LABEL" '
- 'associated with language code en, using the same '
- 'description text.',
- 'messages': [{'html': {'*': 'Item Q582 already has '
- 'label "MODIFIED LABEL" associated with '
- 'language code en, using the same '
- 'description text.'},
- 'name': 'wikibase-validator-label-with-description-conflict',
- 'parameters': ['MODIFIED LABEL',
- 'en',
- '[[Q582|Q582]]']}]},
- 'servedby': 'mw1375'}
-
- modification_failed = ModificationFailed(error_dict['error'])
-
- assert str(modification_failed) == "'Item [[Q582|Q582]] already has label \"MODIFIED LABEL\" associated with language code en, using the same description text.'"
- assert modification_failed.code == 'modification-failed'
- assert modification_failed.info == 'Item [[Q582|Q582]] already has label "MODIFIED LABEL" associated with language code en, using the same description text.'
- assert 'wikibase-validator-label-with-description-conflict' in modification_failed.messages_names
- assert 'Q582' in modification_failed.get_conflicting_entity_ids
- assert 'en' in modification_failed.get_languages
-
- def test_invalid_claim(self):
- error_dict = {
- 'error': {
- '*': 'See https://test.wikidata.org/w/api.php for API usage. Subscribe to the mediawiki-api-announce mailing list at <https://lists.wikimedia.org/mailman/listinfo/mediawiki-api-announce> for notice of API deprecations and breaking changes.',
- 'code': 'invalid-claim',
- 'info': "'' is not a valid property ID",
- 'messages': [{
- 'name': 'wikibase-api-invalid-claim',
- 'parameters': ["'' is not a valid property ID"],
- 'html': {'*': ' is not a valid property ID'}
- }],
- }}
-
- invalid_claim = ModificationFailed(error_dict['error'])
-
- assert str(invalid_claim) == '"\'\' is not a valid property ID"'
- assert invalid_claim.code == 'invalid-claim'
- assert invalid_claim.info == "'' is not a valid property ID"
- assert 'wikibase-api-invalid-claim' in invalid_claim.messages_names
-
- def test_modification_failed_no_dict(self):
- error_dict = {}
- with self.assertRaises(KeyError):
- ModificationFailed(error_dict['error'])
-
- def test_modification_failed_no_message(self):
- error_dict = {'error': {'*': 'See https://test.wikidata.org/w/api.php for API usage. '
- 'Subscribe to the mediawiki-api-announce mailing list at '
- '<https://lists.wikimedia.org/postorius/lists/mediawiki-api-announce.lists.wikimedia.org/> '
- 'for notice of API deprecations and breaking changes.',
- 'code': 'modification-failed',
- 'info': 'Item [[Q582|Q582]] already has label "MODIFIED LABEL" '
- 'associated with language code en, using the same '
- 'description text.'
- },
- 'servedby': 'mw1375'}
-
- exception = ModificationFailed(error_dict['error'])
- assert 'wikibaseintegrator-missing-messages' in exception.messages_names
-
- def test_failed_save_no_conflict(self):
- error_dict = {'error': {'*': 'See https://test.wikidata.org/w/api.php for API usage. '
- 'Subscribe to the mediawiki-api-announce mailing list at '
- '<https://lists.wikimedia.org/postorius/lists/mediawiki-api-announce.lists.wikimedia.org/> '
- 'for notice of API deprecations and breaking changes.',
- 'code': 'failed-save',
- 'info': 'The save has failed.',
- 'messages': [{'html': {'*': 'The save has failed.'},
- 'name': 'wikibase-api-failed-save',
- 'parameters': []}]},
- 'servedby': 'mw1425'}
-
- failed_save = SaveFailed(error_dict['error'])
-
- assert failed_save.get_conflicting_entity_ids == []
-
- def test_modification_failed_no_parameters(self):
- error_dict = {'error': {'*': 'See https://test.wikidata.org/w/api.php for API usage. '
- 'Subscribe to the mediawiki-api-announce mailing list at '
- '<https://lists.wikimedia.org/postorius/lists/mediawiki-api-announce.lists.wikimedia.org/> '
- 'for notice of API deprecations and breaking changes.',
- 'code': 'modification-failed',
- 'info': 'Item [[Q582|Q582]] already has label "MODIFIED LABEL" '
- 'associated with language code en, using the same '
- 'description text.',
- 'messages': [{'html': {'*': 'Item Q582 already has '
- 'label "MODIFIED LABEL" associated with '
- 'language code en, using the same '
- 'description text.'},
- 'name': 'wikibase-validator-label-with-description-conflict',
- }]},
- 'servedby': 'mw1375'}
-
- modification_failed = ModificationFailed(error_dict['error'])
- with self.assertRaises(KeyError):
- _ = modification_failed.get_languages
-
- @staticmethod
- def test_failed_save():
- error_dict = {'error': {'*': 'See https://test.wikidata.org/w/api.php for API usage. '
- 'Subscribe to the mediawiki-api-announce mailing list at '
- '<https://lists.wikimedia.org/postorius/lists/mediawiki-api-announce.lists.wikimedia.org/> '
- 'for notice of API deprecations and breaking changes.',
- 'code': 'failed-save',
- 'info': 'The save has failed.',
- 'messages': [{'html': {'*': 'The save has failed.'},
- 'name': 'wikibase-api-failed-save',
- 'parameters': []},
- {'html': {'*': 'Property P50 already '
- 'has label "Depiction" associated with '
- 'language code en.'},
- 'name': 'wikibase-validator-label-conflict',
- 'parameters': ['Depiction',
- 'en',
- '[[Property:P50|P50]]']},
- {'html': {'*': 'Property P50 already '
- 'has label "representación" associated '
- 'with language code es.'},
- 'name': 'wikibase-validator-label-conflict',
- 'parameters': ['representación',
- 'es',
- '[[Property:P50|P50]]']}]},
- 'servedby': 'mw1425'}
-
- failed_save = SaveFailed(error_dict['error'])
-
- assert str(failed_save) == "'The save has failed.'"
- assert failed_save.code == 'failed-save'
- assert failed_save.info == 'The save has failed.'
- assert 'wikibase-api-failed-save' in failed_save.messages_names
- assert 'P50' in failed_save.get_conflicting_entity_ids
- assert len(failed_save.get_conflicting_entity_ids) == 1
- assert 'en' in failed_save.get_languages
-
- @staticmethod
- def test_searcherror():
- assert str(SearchError('SearchError')) == 'SearchError'
-
- def test_modification_failed_error_dict(self):
- error_dict = {'error': {}}
-
- exception = ModificationFailed(error_dict['error'])
- assert 'wikibaseintegrator-missing-messages' in exception.messages_names
- assert exception.info == 'MWApiError'
- assert exception.code == 'wikibaseintegrator-missing-error-code'
diff --git a/test/test_wbi_fastrun.py b/test/test_wbi_fastrun.py
deleted file mode 100644
index 92c379a5..00000000
--- a/test/test_wbi_fastrun.py
+++ /dev/null
@@ -1,211 +0,0 @@
-from collections import defaultdict
-from typing import Any
-
-from wikibaseintegrator import WikibaseIntegrator, wbi_fastrun
-from wikibaseintegrator.datatypes import BaseDataType, ExternalID, Item
-from wikibaseintegrator.wbi_config import config as wbi_config
-from wikibaseintegrator.wbi_enums import ActionIfExists
-
-wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_wbi_fastrun.py)'
-
-wbi = WikibaseIntegrator()
-
-
-def test_query_data():
- """
- test_fastrun.test_query_data
- This hits live wikidata and may change !!
-
- This tests that the fast run container correctly queries data from wikidata and stores it in the appropriate format
- without getting references
- """
- frc = wbi_fastrun.FastRunContainer(base_filter=[BaseDataType(prop_nr='P699')], base_data_type=BaseDataType)
- # get a string value
- frc._query_data('P699')
- # wikidata-item value
- frc._query_data('P828')
- # uri value
- frc._query_data('P2888')
-
- # https://www.wikidata.org/wiki/Q10874
- assert 'Q10874' in frc.prop_data
- assert 'P699' in frc.prop_data['Q10874']
- # the ID may change, so retrieve it
- statement_id = list(frc.prop_data['Q10874']['P699'].keys())[0]
- d = frc.prop_data['Q10874']['P699'][statement_id]
- # d looks like: {'qual': set(), 'ref': {}, 'v': 'DOID:1432'}
- assert all(x in d for x in {'qual', 'ref', 'v'})
- assert frc.prop_data['Q10874']['P699'][statement_id]['v'].startswith('"DOID:')
-
- # item
- assert list(frc.prop_data['Q10874']['P828'].values())[0]['v'] == "Q18228398"
-
- # uri
- v = {x['v'] for x in frc.prop_data['Q10874']['P2888'].values()}
- assert all(y.startswith(" 0
- ref_id = list(d['ref'].keys())[0]
- ref = d['ref'][ref_id]
- assert len(ref) > 1
-
-
-class FastRunContainerFakeQueryDataEnsembl(wbi_fastrun.FastRunContainer):
- def __init__(self, *args: Any, **kwargs: Any):
- super().__init__(*args, **kwargs)
- self.prop_dt_map = {'P248': 'wikibase-item', 'P594': 'external-id'}
- self.prop_data['Q14911732'] = {'P594': {
- 'fake statement id': {
- 'qual': set(),
- 'ref': {'fake ref id': {
- ('P248',
- 'Q106833387'),
- ('P594',
- 'ENSG00000123374')}},
- 'unit': '1',
- 'v': '"ENSG00000123374"'}}}
- self.rev_lookup = defaultdict(set)
- self.rev_lookup['"ENSG00000123374"'].add('Q14911732')
-
-
-class FastRunContainerFakeQueryDataEnsemblNoRef(wbi_fastrun.FastRunContainer):
- def __init__(self, *args: Any, **kwargs: Any):
- super().__init__(*args, **kwargs)
- self.prop_dt_map = {'P248': 'wikibase-item', 'P594': 'external-id'}
- self.prop_data['Q14911732'] = {'P594': {
- 'fake statement id': {
- 'qual': set(),
- 'ref': {},
- 'v': 'ENSG00000123374'}}}
- self.rev_lookup = defaultdict(set)
- self.rev_lookup['"ENSG00000123374"'].add('Q14911732')
-
-
-def test_fastrun_ref_ensembl():
- # fastrun checks refs
- frc = FastRunContainerFakeQueryDataEnsembl(base_filter=[BaseDataType(prop_nr='P594'), Item(prop_nr='P703', value='Q15978631')], base_data_type=BaseDataType, use_refs=True)
-
- # statement has no ref
- statements = [ExternalID(value='ENSG00000123374', prop_nr='P594')]
- assert frc.write_required(data=statements)
-
- # statement has the same ref
- statements = [ExternalID(value='ENSG00000123374', prop_nr='P594', references=[[Item("Q106833387", prop_nr="P248"), ExternalID("ENSG00000123374", prop_nr="P594")]])]
- assert not frc.write_required(data=statements)
-
- # new statement has an different stated in
- statements = [ExternalID(value='ENSG00000123374', prop_nr='P594', references=[[Item("Q99999999999", prop_nr="P248"), ExternalID("ENSG00000123374", prop_nr="P594", )]])]
- assert frc.write_required(data=statements)
-
- # fastrun don't check references, statement has no reference,
- frc = FastRunContainerFakeQueryDataEnsemblNoRef(base_filter=[BaseDataType(prop_nr='P594'), Item(prop_nr='P703', value='Q15978631')], base_data_type=BaseDataType,
- use_refs=False)
- statements = [ExternalID(value='ENSG00000123374', prop_nr='P594')]
- assert not frc.write_required(data=statements)
-
- # fastrun don't check references, statement has reference,
- frc = FastRunContainerFakeQueryDataEnsemblNoRef(base_filter=[BaseDataType(prop_nr='P594'), Item(prop_nr='P703', value='Q15978631')], base_data_type=BaseDataType,
- use_refs=False)
- statements = [ExternalID(value='ENSG00000123374', prop_nr='P594', references=[[Item("Q123", prop_nr="P31")]])]
- assert not frc.write_required(data=statements)
-
-
-class FakeQueryDataAppendProps(wbi_fastrun.FastRunContainer):
- # an item with three values for the same property
- def __init__(self, *args: Any, **kwargs: Any):
- super().__init__(*args, **kwargs)
- self.prop_dt_map = {'P527': 'wikibase-item', 'P248': 'wikibase-item', 'P594': 'external-id'}
-
- self.rev_lookup = defaultdict(set)
- self.rev_lookup['Q24784025'].add('Q3402672')
- self.rev_lookup['Q24743729'].add('Q3402672')
- self.rev_lookup['Q24782625'].add('Q3402672')
-
- self.prop_data['Q3402672'] = {'P527': {
- 'Q3402672-11BA231B-857B-498B-AC4F-91D71EE007FD': {'qual': set(),
- 'ref': {
- '149c9c7ba4e246d9f09ce3ed0cdf7aa721aad5c8': {
- ('P248', 'Q3047275'),
- }},
- 'v': 'Q24784025'},
- 'Q3402672-15F54AFF-7DCC-4DF6-A32F-73C48619B0B2': {'qual': set(),
- 'ref': {
- '149c9c7ba4e246d9f09ce3ed0cdf7aa721aad5c8': {
- ('P248', 'Q3047275'),
- }},
- 'v': 'Q24743729'},
- 'Q3402672-C8F11D55-1B11-44E5-9EAF-637E062825A4': {'qual': set(),
- 'ref': {
- '149c9c7ba4e246d9f09ce3ed0cdf7aa721aad5c8': {
- ('P248', 'Q3047275')}},
- 'v': 'Q24782625'}}}
-
-
-def test_append_props():
- qid = 'Q3402672'
- # https://www.wikidata.org/wiki/Q3402672#P527
-
- # don't consider refs
- frc = FakeQueryDataAppendProps(base_filter=[BaseDataType(prop_nr='P352'), Item(prop_nr='P703', value='Q15978631')], base_data_type=BaseDataType)
- # with append
- statements = [Item(value='Q24784025', prop_nr='P527')]
- assert frc.write_required(data=statements, action_if_exists=ActionIfExists.APPEND_OR_REPLACE, cqid=qid) is False
- # with force append
- statements = [Item(value='Q24784025', prop_nr='P527')]
- assert frc.write_required(data=statements, action_if_exists=ActionIfExists.FORCE_APPEND, cqid=qid) is True
- # without append
- statements = [Item(value='Q24784025', prop_nr='P527')]
- assert frc.write_required(data=statements, cqid=qid) is True
-
- # if we are in append mode, and the refs are different, we should write
- frc = FakeQueryDataAppendProps(base_filter=[BaseDataType(prop_nr='P352'), Item(prop_nr='P703', value='Q15978631')], base_data_type=BaseDataType, use_refs=True)
- # with append
- statements = [Item(value='Q24784025', prop_nr='P527')]
- assert frc.write_required(data=statements, cqid=qid, action_if_exists=ActionIfExists.APPEND_OR_REPLACE) is True
- # without append
- statements = [Item(value='Q24784025', prop_nr='P527')]
- assert frc.write_required(data=statements, cqid=qid) is True
diff --git a/test/test_wbi_helpers.py b/test/test_wbi_helpers.py
deleted file mode 100644
index 2d4b8a76..00000000
--- a/test/test_wbi_helpers.py
+++ /dev/null
@@ -1,160 +0,0 @@
-import logging
-import os
-import unittest
-
-import pytest
-import requests
-
-from wikibaseintegrator.wbi_config import config as wbi_config
-from wikibaseintegrator.wbi_exceptions import MaxRetriesReachedException
-from wikibaseintegrator.wbi_helpers import execute_sparql_query, format2wbi, get_user_agent, mediawiki_api_call_helper
-
-
-def test_connection():
- wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_wbi_helpers.py)'
- data = {'format': 'json', 'action': 'wbgetentities', 'ids': 'Q42'}
-
- mediawiki_api_call_helper(data=data, max_retries=2, retry_after=1, allow_anonymous=True)
-
- with pytest.raises(MaxRetriesReachedException):
- mediawiki_api_call_helper(data=data, mediawiki_api_url="https://www.wikidataaaaaaa.org", max_retries=2, retry_after=1, allow_anonymous=True)
-
- with pytest.raises(MaxRetriesReachedException):
- mediawiki_api_call_helper(data=data, mediawiki_api_url=os.getenv("HTTPSTATUS_SERVICE", "https://httpbin.org") + "/status/500", max_retries=2, retry_after=1, allow_anonymous=True)
-
- with pytest.raises(MaxRetriesReachedException):
- mediawiki_api_call_helper(data=data, mediawiki_api_url=os.getenv("HTTPSTATUS_SERVICE", "https://httpbin.org") + "/status/502", max_retries=2, retry_after=1, allow_anonymous=True)
-
- with pytest.raises(MaxRetriesReachedException):
- mediawiki_api_call_helper(data=data, mediawiki_api_url=os.getenv("HTTPSTATUS_SERVICE", "https://httpbin.org") + "/status/503", max_retries=2, retry_after=1, allow_anonymous=True)
-
- with pytest.raises(MaxRetriesReachedException):
- mediawiki_api_call_helper(data=data, mediawiki_api_url=os.getenv("HTTPSTATUS_SERVICE", "https://httpbin.org") + "/status/504", max_retries=2, retry_after=1, allow_anonymous=True)
-
- with pytest.raises(requests.HTTPError):
- mediawiki_api_call_helper(data=data, mediawiki_api_url=os.getenv("HTTPSTATUS_SERVICE", "https://httpbin.org") + "/status/400", max_retries=2, retry_after=1, allow_anonymous=True)
-
-
-def test_user_agent(caplog):
- wbi_config['USER_AGENT'] = None # Reset user agent
- # Test there is no warning because of the user agent
- with caplog.at_level(logging.WARNING):
- mediawiki_api_call_helper(data={'format': 'json', 'action': 'wbgetentities', 'ids': 'Q42'}, max_retries=3, retry_after=1, allow_anonymous=True,
- user_agent='MyWikibaseBot/0.5')
- assert 'WARNING' not in caplog.text
-
- # Test there is a warning
- with caplog.at_level(logging.WARNING):
- mediawiki_api_call_helper(data={'format': 'json', 'action': 'wbgetentities', 'ids': 'Q42'}, max_retries=3, retry_after=1, allow_anonymous=True)
- assert 'Please set an user agent' in caplog.text
-
- # Test if the user agent is correctly added
- new_user_agent = get_user_agent(user_agent='MyWikibaseBot/0.5')
- assert new_user_agent.startswith('MyWikibaseBot/0.5')
- assert 'WikibaseIntegrator' in new_user_agent
-
-
-def test_allow_anonymous():
- wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_wbi_helpers.py)'
- # Test there is a warning because of allow_anonymous
- with pytest.raises(ValueError):
- mediawiki_api_call_helper(data={'format': 'json', 'action': 'wbgetentities', 'ids': 'Q42'}, max_retries=3, retry_after=1, user_agent='MyWikibaseBot/0.5')
-
- # Test there is no warning because of allow_anonymous
- assert mediawiki_api_call_helper(data={'format': 'json', 'action': 'wbgetentities', 'ids': 'Q42'}, max_retries=3, retry_after=1, allow_anonymous=True,
- user_agent='MyWikibaseBot/0.5')
-
-
-def test_sparql():
- wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_wbi_helpers.py)'
- results = execute_sparql_query('''SELECT ?child ?childLabel
-WHERE
-{
-# ?child father Bach
- ?child wdt:P22 wd:Q1339.
- SERVICE wikibase:label { bd:serviceParam wikibase:language "[AUTO_LANGUAGE]". }
-}''')
- assert len(results['results']['bindings']) > 1
-
-
-def test_format2wbi():
- wbi_config['USER_AGENT'] = 'WikibaseIntegrator-pytest/1.0 (test_wbi_helpers.py)'
- from wikibaseintegrator.entities import ItemEntity, LexemeEntity, MediaInfoEntity, PropertyEntity
-
- assert isinstance(format2wbi('item', '{}'), ItemEntity)
- assert isinstance(format2wbi('property', '{}'), PropertyEntity)
- assert isinstance(format2wbi('lexeme', '{}'), LexemeEntity)
- assert isinstance(format2wbi('mediainfo', '{}'), MediaInfoEntity)
- with pytest.raises(ValueError):
- format2wbi('unknown', '{}')
-
- result = format2wbi('item', '''{
- "aliases": {
- "uk": "Війєрбан",
- "be": [
- {
- "value": "Вілербан"
- },
- {
- "value": "Віербан"
- }
- ],
- "en": [
- "first alias",
- "second alias"
- ]
- },
- "labels": {
- "en": "Between Expressiveness and Verifiability: P/T-nets with Synchronous Channels and Modular Structure"
- },
- "descriptions": {
- "en": "scientific paper published in CEUR-WS Volume 3170"
- },
- "claims": {
- "P1433": "Q113529188",
- "P1476": {
- "text": "Between Expressiveness and Verifiability: P/T-nets with Synchronous Channels and Modular Structure",
- "language": "en"
- },
- "P2093": [
- {
- "value": "Lukas Voß",
- "qualifiers": {
- "P1545": "1"
- }
- },
- {
- "value": "Sven Willrodt",
- "qualifiers": {
- "P1545": "2"
- }
- },
- {
- "value": "Daniel Moldt",
- "qualifiers": {
- "P1545": "3"
- }
- },
- {
- "value": "Michael Haustermann",
- "qualifiers": {
- "P1545": "4"
- }
- }
- ],
- "P31": "Q13442814",
- "P407": "Q1860",
- "P50": [],
- "P953": "https://ceur-ws.org/Vol-3170/paper3.pdf"
- }
-}''')
- assert isinstance(result, ItemEntity)
-
- # Test aliases
- # TODO: add test aliases
-
- # Test descriptions
- assert result.descriptions.get('en') == 'scientific paper published in CEUR-WS Volume 3170'
-
- # Test labels
- assert result.labels.get('en') == 'Between Expressiveness and Verifiability: P/T-nets with Synchronous Channels and Modular Structure'
diff --git a/test/test_wbi_login.py b/test/test_wbi_login.py
deleted file mode 100644
index 8c7e408c..00000000
--- a/test/test_wbi_login.py
+++ /dev/null
@@ -1,90 +0,0 @@
-import os
-import sys
-import unittest
-
-import pytest
-import requests
-from oauthlib.oauth2 import MissingTokenError
-
-from wikibaseintegrator import wbi_login
-from wikibaseintegrator.wbi_helpers import mediawiki_api_call_helper
-# look for environment variables. if none set, don't do anything
-from wikibaseintegrator.wbi_login import LoginError
-
-WDUSER = os.getenv("WDUSER")
-WDPASS = os.getenv("WDPASS")
-OAUTH1_CONSUMER_TOKEN_NOT_OWNER_ONLY = os.getenv("OAUTH1_CONSUMER_TOKEN_NOT_OWNER_ONLY")
-OAUTH1_CONSUMER_SECRET_NOT_OWNER_ONLY = os.getenv("OAUTH1_CONSUMER_SECRET_NOT_OWNER_ONLY")
-OAUTH1_CONSUMER_TOKEN = os.getenv("OAUTH1_CONSUMER_TOKEN")
-OAUTH1_CONSUMER_SECRET = os.getenv("OAUTH1_CONSUMER_SECRET")
-OAUTH1_ACCESS_TOKEN = os.getenv("OAUTH1_ACCESS_TOKEN")
-OAUTH1_ACCESS_SECRET = os.getenv("OAUTH1_ACCESS_SECRET")
-OAUTH2_CONSUMER_TOKEN = os.getenv("OAUTH2_CONSUMER_TOKEN")
-OAUTH2_CONSUMER_SECRET = os.getenv("OAUTH2_CONSUMER_SECRET")
-
-
-def test_login():
- with pytest.raises(LoginError):
- login = wbi_login.Clientlogin(user='wrong', password='wrong')
- login.generate_edit_credentials()
-
- with pytest.raises(LoginError):
- login = wbi_login.Login(user='wrong', password='wrong')
- login.generate_edit_credentials()
-
- if WDUSER and WDPASS:
- assert wbi_login.Clientlogin(user=WDUSER, password=WDPASS)
- assert wbi_login.Login(user=WDUSER, password=WDPASS)
- else:
- print("no WDUSER or WDPASS found in environment variables", file=sys.stderr)
-
-
-def test_verify():
- with pytest.raises(requests.exceptions.SSLError):
- wbi_login.Clientlogin(user='wrong', password='wrong', mediawiki_api_url='https://self-signed.badssl.com/', verify=True)
-
- with pytest.raises(requests.exceptions.JSONDecodeError):
- wbi_login.Clientlogin(user='wrong', password='wrong', mediawiki_api_url='https://self-signed.badssl.com/', verify=False)
-
-
-def test_oauth1():
- with pytest.raises(LoginError):
- login = wbi_login.OAuth1(consumer_token='wrong', consumer_secret='wrong')
- login.generate_edit_credentials()
-
- if OAUTH1_CONSUMER_TOKEN_NOT_OWNER_ONLY and OAUTH1_CONSUMER_SECRET_NOT_OWNER_ONLY:
- wbi_login.OAuth1(consumer_token=OAUTH1_CONSUMER_TOKEN_NOT_OWNER_ONLY, consumer_secret=OAUTH1_CONSUMER_SECRET_NOT_OWNER_ONLY)
- else:
- print("no OAUTH1_CONSUMER_TOKEN_NOT_OWNER_ONLY or OAUTH1_CONSUMER_SECRET_NOT_OWNER_ONLY found in environment variables", file=sys.stderr)
-
-
-def test_oauth1_access():
- with pytest.raises(LoginError):
- login = wbi_login.OAuth1(consumer_token='wrong', consumer_secret='wrong', access_token='wrong', access_secret='wrong')
- login.generate_edit_credentials()
-
- if OAUTH1_CONSUMER_TOKEN and OAUTH1_CONSUMER_SECRET and OAUTH1_ACCESS_TOKEN and OAUTH1_ACCESS_SECRET:
- login = wbi_login.OAuth1(consumer_token=OAUTH1_CONSUMER_TOKEN, consumer_secret=OAUTH1_CONSUMER_SECRET, access_token=OAUTH1_ACCESS_TOKEN, access_secret=OAUTH1_ACCESS_SECRET)
- login.generate_edit_credentials()
- else:
- print("no OAUTH1_CONSUMER_TOKEN or OAUTH1_CONSUMER_SECRET or OAUTH1_ACCESS_TOKEN or OAUTH1_ACCESS_SECRET found in environment variables", file=sys.stderr)
-
-
-def test_oauth2():
- with pytest.raises((MissingTokenError, LoginError)):
- login = wbi_login.OAuth2(consumer_token='wrong', consumer_secret='wrong')
- login.generate_edit_credentials()
-
- if OAUTH2_CONSUMER_TOKEN and OAUTH2_CONSUMER_SECRET:
- login = wbi_login.OAuth2(consumer_token=OAUTH2_CONSUMER_TOKEN, consumer_secret=OAUTH2_CONSUMER_SECRET)
- login.generate_edit_credentials()
- else:
- print("no OAUTH2_CONSUMER_TOKEN or CLIENT_SECRET found in environment variables", file=sys.stderr)
-
-
-def test_mismatch_api_url():
- if WDUSER and WDPASS:
- login = wbi_login.Login(user=WDUSER, password=WDPASS)
- login.generate_edit_credentials()
- with pytest.raises(ValueError):
- mediawiki_api_call_helper(data={}, login=login, mediawiki_api_url='https://unsdfdskfjljzkerezr.org/w/api.php')