Usuário:Ederporto/Testes/Programação

This code is written in the Python programming language.

Starting from a list of articles and their associated dates, the code fetches the pageview data for each article in the list.
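
The data come from the Wikimedia pageviews REST API. Each response is a JSON object whose 'items' list holds one entry per day, and the script only reads the 'timestamp' and 'views' fields of each entry. As a standalone illustration before the full script, a single request could look like the sketch below; the article and date range are illustrative, and the API may answer with an HTTP error if there are no recorded views in the requested range.

#Minimal sketch of one pageviews request (illustrative article and date range)
import json
import urllib.request as ur

url = ("https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/"
       "pt.wikipedia/all-access/user/Hiroshi_Okamura/daily/2016090100/2016093000")
data = json.loads(ur.urlopen(url).read().decode("utf8"))
for item in data['items']:
    print(item['timestamp'], item['views'])    #one line per day with recorded views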

#Imports
from datetime import datetime
from dateutil.relativedelta import relativedelta
import urllib.request as ur
import requests
import json

#Transform the date from string to date format
def parse_date(stringDate):
    return datetime.strptime(stringDate.ljust(10, '0'), '%Y%m%d%H')

#Transform the date from date format to string
def format_date(d):
    return datetime.strftime(d, '%Y%m%d%H')

#Return the pageviews API url for the article, covering 240 days before the date and 239 days after
def endpoint(article,date,language,project):
    start = format_date(parse_date(date) - relativedelta(days=240))
    end = format_date(parse_date(date) + relativedelta(days=239))
    #requests.get is used only to get back a percent-encoded version of this url (at the cost of one extra request)
    return (requests.get(''.join(["https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/",language,".",project,"/all-access/user/",article,"/daily/",start,"/",end]))).url

#Get the views grouped in 60-day blocks ("bimesters"): the 4 blocks before the date and the 4 after
def Bimonthlysum(article,Listofdates,Listofviews,date):
    views_by_date = dict(zip(Listofdates, Listofviews))
    #sums[0..3] hold the 4th, 3rd, 2nd and 1st block before the date; sums[4..7] the 1st to 4th block after
    sums = [0]*8
    for i in range(480):
        aux = format_date(parse_date(date) - relativedelta(days=240-i))
        if aux in views_by_date:
            sums[i//60] = sums[i//60] + views_by_date[aux]
    print ('%s\t%d\t%d\t%d\t%d\t%d\t%d\t%d\t%d'%(article,*sums))

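#Parallel input lists: Articles[i] is paired with Dates[i], Wikis[i] and Projects[i]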
Articles=["Problema de valor inicial", "Hiroshi Okamura", "Processo estocástico", "Nervo musculocutâneo", "Sistema circulatório equino", "Saúde canina", "Portal:Probabilidade e estatística", "Membros do cavalo", "Mediana (estatística)", "Jogo de computação baseado em humanos"]
Dates=["20160829", "20160831", "20161017", "20161026", "20161107", "20161119", "20161207", "20161207", "20161209", "20161212"]
Wikis=["pt", "pt", "pt", "pt", "pt", "pt", "pt", "pt", "pt", "pt"]
Projects=["wikipedia", "wikipedia", "wikipedia", "wikipedia", "wikipedia", "wikipedia", "wikipedia", "wikipedia", "wikipedia", "wikipedia"]

#Printing the header
print ('\t\tBefore\t\t\t\tAfter')
print ('Articles\t4ºbi\t3ºbi\t2ºbi\t1ºbi\t1ºbi\t2ºbi\t3ºbi\t4ºbi')

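#Fetch the pageview data for each article and print one line of bimonthly sums per article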
for i in range(len(Articles)):
    url = endpoint(Articles[i],Dates[i],Wikis[i],Projects[i])
    response = ur.urlopen(url)
    content = response.read()
    data = json.loads(content.decode("utf8"))

    Listofdates = []
    Listofviews = []
    for item in data['items']:
        Listofdates.append(item['timestamp'])
        Listofviews.append(item['views'])
    Bimonthlysum(Articles[i],Listofdates,Listofviews,Dates[i])
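
One design note: endpoint builds the request url and then calls requests.get on it only so that the .url attribute of the response comes back percent-encoded (the REST API expects article titles with spaces and special characters encoded), which costs one throwaway request per article. Below is a minimal sketch of an alternative that encodes the title directly; it assumes the API accepts titles quoted with urllib.parse.quote and with underscores in place of spaces, the helper name is hypothetical, and it reuses parse_date and format_date from the script above.

#Sketch: build an already-encoded url directly, without the extra request
from urllib.parse import quote

def endpoint_without_extra_request(article, date, language, project):
    #Assumption: the pageviews API accepts percent-encoded titles with underscores for spaces
    title = quote(article.replace(' ', '_'), safe='')
    start = format_date(parse_date(date) - relativedelta(days=240))
    end = format_date(parse_date(date) + relativedelta(days=239))
    return ("https://wikimedia.org/api/rest_v1/metrics/pageviews/per-article/"
            + language + "." + project + "/all-access/user/" + title
            + "/daily/" + start + "/" + end)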