Cool_Breeze posted on 2020-6-8 18:48:11

Scraping weather information (my first crawler), with a simple tkinter front end added

This post was last edited by Cool_Breeze on 2020-7-27 11:21

tkinter
#coding=utf-8

import tkinter as tk
import time
from tianqi import main
def home_main():
    font = 'Consolas', 11
    home = tk.Tk()
    home.title('天气查询 by cool_breeze')
    home.geometry('800x600')

    var = tk.StringVar()  # status line shown below the weather data
    varL = []
    for n in range(17):
        varL.append(tk.StringVar())  # one StringVar per output line

    Llist = []
    for n in range(17):
        Llist.append(tk.Label(home, textvariable=varL[n], bg='honeydew', font=font))
    L0 = tk.Label(home, textvariable=var, bg='honeydew', font=font)

    for n in Llist:
        n.pack(fill=tk.X, side=tk.TOP)
    L0.pack(fill=tk.X, side=tk.TOP)

    def tishi():
        var.set('正在获取网页信息...')
        home.update()  # refresh the window so the prompt appears immediately

    def zigong(n):
        tishi()
        res = main(n)
        for i, j in enumerate(varL):
            j.set(res[i] if i < len(res) else '')  # one line of the result per label
        var.set('')

    def exit_():
        home.destroy()
   
    tk.Button(home, text='自流井', font=font, bg='ivory', width=10, height=1, command=lambda: zigong(1)).pack(side='left')
    tk.Button(home, text='成都', font=font, bg='ivory', width=10, height=1, command=lambda: zigong(2)).pack(side='left')
    tk.Button(home, text='东莞', font=font, bg='ivory', width=10, height=1, command=lambda: zigong(3)).pack(side='left')
    tk.Button(home, text='退出', font=font, bg='ivory', width=10, height=1, command=exit_).pack(side='left')

    home.mainloop()
if __name__ == '__main__':
    home_main()
crawler (tianqi.py)
#coding=utf-8

import urllib.request,urllib.error
from bs4 import BeautifulSoup as bfs
import os
import re

def main(number):
    # one Cookie fragment per city: 1 = 自流井, 2 = 成都, 3 = 东莞
    city_list = [
        'positionCityID=59289; positionCityPinyin=zigong; lastCountyId=71992; lastCountyPinyin=ziliujing',
        'lastCountyId=56294; lastCountyPinyin=chengdu; lastCountyTime=1591617161',
        'lastCountyId=59289; lastCountyPinyin=dongguan; lastCountyTime=1591612281',
    ]

    url = 'http://tianqi.2345.com/'

    html = askurl(url, city_list[number - 1])  # the buttons pass 1, 2 or 3
    data = getdata(html)
    return data

def format(string):
    # pad a field to a fixed display width so the per-day columns line up;
    # subtracting str_len roughly compensates for full-width (CJK) characters
    str_len = len(string)
    max_len = 13
    if 1 <= str_len <= max_len:
        return string.center(max_len - str_len)
    else:
        return string

def askurl(url,cookkey):
    head = {
    'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
    'Accept-Language': 'zh-CN,zh;q=0.9',
    'Cookie': 'qd_dz_ct=59289; sts=1; theme=-1; wc=59289; lc=59289; lc2=59289; wc_n=%25u4E1C%25u839E; ' + cookkey,
    'User-Agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3641.400 QQBrowser/10.4.3284.400'
    }
   
    req = urllib.request.Request(url=url, headers=head)
    try:
        response = urllib.request.urlopen(req)
        if response.chunked:  # a chunked reply is treated here as a failed / redirected request
            print('访问:{} 失败!\n请检查网络是否可以正确的访问Internet!'.format(url))
            exit()
    except urllib.error.URLError as err:
        print('\n网络连接失败!\n请检查网络连接!', err)
        input('按任意键退出!')
        exit()
    html = bfs(response, 'html.parser')  # parse the HTML
    # print(html)

    return html

def getdata(html):
    datalist = []
    datalist.append(html.find('em', class_="location-city location-font1").string)
    # children of the <p class="date"> line; positions follow the 2020 page layout
    date_temp = html.find('p', class_="date").contents
    datalist.append(date_temp[0].string + ' ' + date_temp[1].string)
    datalist.append(date_temp[2].string)
    datalist.append(date_temp[3].string)
    # current weather
    weather = html.find('a', class_="wea-info-index")
    datalist.append(weather.span.string + weather.b.string)
    datalist.append(html.find('a', class_="wea-other-a-we").string)
    datalist.append('空气质量:' + html.find('a', class_="wea-aqi-tip-index").em.string)
    weather = html.find('ul', class_="wea-info-tip").find_all('li')
    datalist.append(weather[0].span.string + ' : ' + weather[0].em.string)
    datalist.append(weather[1].span.string + ' : ' + weather[1].em.string)
    datalist.append(weather[2].span.string + ' : ' + weather[2].em.string)
    # print(datalist)
   
    # weather for the next six days, one sub-list per day
    tomorrow = [[], [], [], [], [], []]

    def get_tomorrow(htmlobj, index):  # collect the fields for one day
        temp = htmlobj.contents
        # child positions follow the 2020 page layout and may need adjusting
        tomorrow[index].append(format(temp[0].text + ' ' + temp[1].text))
        tomorrow[index].append(format(temp[2].text))
        tomorrow[index].append(format(temp[3].text + ' ' + temp[4].text))
        tomorrow[index].append(format('空气质量:' + temp[5].text))

    info_tomorrow = html.find('ul', class_="weaday7 wea-white-icon")
    a_list = info_tomorrow.find_all('a')
    for day, index in zip(range(2, 14, 2), range(6)):
        get_tomorrow(a_list[day], index)
      
    # high / low temperatures are embedded in the last <script> block
    script = html.findAll('script')[-1]
    H = re.compile(r'var day7DataHight = \[(.*)\]')
    L = re.compile(r'var day7DataLow = \[(.*)\]')
    H_list = re.findall(H, str(script))[0].split(',')
    L_list = re.findall(L, str(script))[0].split(',')
    n = 0
    for i, j in zip(L_list, H_list):
        if not n:  # skip today's value; the 7-day arrays start with it
            n += 1
            continue
        tomorrow[n - 1].insert(3, format(i + ' ~ ' + j))
        n += 1
    # print(datalist + tomorrow)
    return datalist + [' '.join(day) for day in tomorrow]  # one display line per day
if __name__ == '__main__':
    print(main(3))


myhic posted on 2020-6-9 00:09:25

Can the city be changed? My city isn't in the list.

Cool_Breeze posted on 2020-6-9 00:43:44

This post was last edited by Cool_Breeze on 2020-6-9 09:49

myhic posted on 2020-6-9 00:09:
Can the city be changed? My city isn't in the list.

You can add cities yourself: just add that city's Cookie string to the list! A sketch is below.
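For example, a minimal sketch (not tested against the live site): the Beijing entry and its lastCountyId / lastCountyPinyin values are placeholders you would replace with the Cookie copied from your own browser after opening the city's page on tianqi.2345.com; city_list, zigong, home and font are the names from the code above.

# tianqi.py: append the new city's Cookie fragment
city_list = [
    'positionCityID=59289; positionCityPinyin=zigong; lastCountyId=71992; lastCountyPinyin=ziliujing',
    'lastCountyId=56294; lastCountyPinyin=chengdu; lastCountyTime=1591617161',
    'lastCountyId=59289; lastCountyPinyin=dongguan; lastCountyTime=1591612281',
    'lastCountyId=54511; lastCountyPinyin=beijing',  # placeholder: copy the real values from your browser
]

# tkinter script: add a matching button that passes the new index (4)
tk.Button(home, text='北京', font=font, bg='ivory', width=10, height=1,
          command=lambda: zigong(4)).pack(side='left')

The index passed to zigong() is 1-based, matching city_list[number - 1] in main().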