import os
from urllib.parse import urlparse

import requests

import pywikibot
from pywikibot import pagegenerators as pg
  6. QUERY="""
  7. SELECT DISTINCT ?item WHERE {
  8. ?item wdt:P1324 ?repo FILTER CONTAINS(str(?repo), "github.com").
  9. FILTER NOT EXISTS { ?item p:P2992 ?qa }
  10. SERVICE wikibase:label {
  11. bd:serviceParam wikibase:language "en" .
  12. }
  13. }
  14. """
  15. wikidata_site = pywikibot.Site("wikidata", "wikidata")
  16. P_described_at_url = "P973"
  17. P_archive_url = "P1065"
  18. P_software_quality_assurance = "P2992"
  19. Q_continuous_integration = "Q965769"
  20. for item in pg.WikidataSPARQLPageGenerator(QUERY, site=wikidata_site):
  21. print(str(item))
  22. item_dict = item.get()
  23. clm_dict = item_dict["claims"]
  24. for url in [ claim.getTarget() for claim in clm_dict['P1324'] ]:
  25. if 'github.com' not in url:
  26. continue
  27. path = os.path.normpath(urlparse(url).path)[1:]
  28. if len(path.split("/", -1)) != 2:
  29. print("SKIP: GET " + url + " path does not have exactly two elements")
  30. continue
  31. if requests.get(url).status_code != requests.codes.ok:
  32. print("ERROR: GET " + url + " failed")
  33. continue
  34. travis = url + "/blob/master/.travis.yml"
  35. if requests.get(travis).status_code != requests.codes.ok:
  36. print("SKIP: GET " + travis + " not found")
  37. continue
  38. travis_ci = "https://travis-ci.org/" + path
  39. if requests.get(travis_ci).status_code != requests.codes.ok:
  40. print("SKIP: GET " + travis_ci + " not found")
  41. continue
  42. print("FOUND " + travis + " and " + travis_ci)
  43. continuous_integration = pywikibot.ItemPage(wikidata_site, Q_continuous_integration, 0)
  44. software_quality_assurance = pywikibot.Claim(wikidata_site, P_software_quality_assurance, 0)
  45. software_quality_assurance.setTarget(continuous_integration)
  46. item.addClaim(software_quality_assurance)
  47. described_at_url = pywikibot.Claim(wikidata_site, P_described_at_url, 0)
  48. described_at_url.setTarget(travis)
  49. software_quality_assurance.addQualifier(described_at_url)
  50. archive_url = pywikibot.Claim(wikidata_site, P_archive_url, 0)
  51. archive_url.setTarget(travis_ci)
  52. software_quality_assurance.addQualifier(archive_url)