Basic Information
Source name: Crawl Weibo user information with Python and generate a word cloud
Source size: 0.24 MB
File format: .zip
Language: Python
Last updated: 2020-02-24
Source Introduction

    A Weibo crawler (a secondary development of an existing GitHub project), extended to count a user's high-frequency words and generate a word cloud, plus a text input field and other additions (a word-cloud sketch follows below).

    Please make sure your environment is configured correctly before running; contact me if you run into any problems.
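
    The word-cloud extension mentioned above is not included in the code excerpt below. The following is only a minimal sketch of how it could work, assuming jieba for Chinese word segmentation and the wordcloud package; the function name, font path and output file are placeholders, not the project's actual code:

from collections import Counter

import jieba
from wordcloud import WordCloud

def build_wordcloud(texts, font_path='simhei.ttf', out_file='wordcloud.png'):
    """texts: a list of weibo text strings that have already been crawled."""
    words = []
    for text in texts:
        # keep tokens longer than one character to drop particles and punctuation
        words.extend(w for w in jieba.cut(text) if len(w) > 1)
    freq = Counter(words)
    wc = WordCloud(font_path=font_path, width=800, height=600,
                   background_color='white')
    wc.generate_from_frequencies(freq)
    wc.to_file(out_file)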


    def get_json(self, params):
        """Fetch the JSON data behind the page (requires `import requests` at module level)."""
        url = 'https://m.weibo.cn/api/container/getIndex?'
        r = requests.get(url, params=params)
        return r.json()

    def get_user_info(self):
        """Fetch the user's profile information."""
        params = {'containerid': '100505' + str(self.user_id)}
        js = self.get_json(params)
        if js['ok']:
            info = js['data']['userInfo']
            user_info = {}
            user_info['id'] = self.user_id
            user_info['screen_name'] = info.get('screen_name', '')
            user_info['gender'] = info.get('gender', '')
            user_info['statuses_count'] = info.get('statuses_count', 0)
            user_info['followers_count'] = info.get('followers_count', 0)
            user_info['follow_count'] = info.get('follow_count', 0)
            user_info['description'] = info.get('description', '')
            user_info['profile_url'] = info.get('profile_url', '')
            user_info['profile_image_url'] = info.get('profile_image_url', '')
            user_info['avatar_hd'] = info.get('avatar_hd', '')
            user_info['urank'] = info.get('urank', 0)
            user_info['mbrank'] = info.get('mbrank', 0)
            user_info['verified'] = info.get('verified', False)
            user_info['verified_type'] = info.get('verified_type', 0)
            user_info['verified_reason'] = info.get('verified_reason', '')
            user = self.standardize_info(user_info)
            self.user = user
            self.user_to_database()
            return user


    def get_long_weibo(self, id):
        """Fetch the full content of a long weibo (requires `import json` at module level)."""
        url = 'https://m.weibo.cn/detail/%s' % id
        html = requests.get(url).text
        # Extract the JSON embedded in the page between '"status":' and '"hotScheme"',
        # drop the trailing comma, then wrap it in braces so it parses as a JSON object.
        html = html[html.find('"status":'):]
        html = html[:html.rfind('"hotScheme"')]
        html = html[:html.rfind(',')]
        html = '{' + html + '}'
        js = json.loads(html, strict=False)
        weibo_info = js.get('status')
        if weibo_info:
            weibo = self.parse_weibo(weibo_info)
            return weibo


    def get_pics(self, weibo_info):
        """Get the original image URLs of a weibo, joined by commas."""
        if weibo_info.get('pics'):
            pic_info = weibo_info['pics']
            pic_list = [pic['large']['url'] for pic in pic_info]
            pics = ','.join(pic_list)
        else:
            pics = ''
        return pics
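
    get_pics returns the original image URLs joined by commas. As a usage illustration only, here is a minimal sketch of downloading those URLs with requests; the save directory and file naming are assumptions, not part of the project:

import os

import requests

def download_pics(pics, save_dir='weibo_pics'):
    """pics: the comma-separated URL string returned by get_pics()."""
    if not pics:
        return
    os.makedirs(save_dir, exist_ok=True)
    for i, url in enumerate(pics.split(',')):
        resp = requests.get(url, timeout=10)
        if resp.status_code == 200:
            # files are named by index here; the real project may name them by weibo id
            with open(os.path.join(save_dir, '%d.jpg' % i), 'wb') as f:
                f.write(resp.content)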


    def get_video_url(self, weibo_info):
        """Get the video URL of a weibo, preferring the highest available resolution."""
        video_url = ''
        if weibo_info.get('page_info'):
            if weibo_info['page_info'].get('media_info'):
                media_info = weibo_info['page_info']['media_info']
                # Fall back from 720p to HD to SD when a higher resolution is missing
                video_url = media_info.get('mp4_720p_mp4')
                if not video_url:
                    video_url = media_info.get('mp4_hd_url')
                    if not video_url:
                        video_url = media_info.get('mp4_sd_url')
                        if not video_url:
                            video_url = ''
        return video_url

        # Create the 'weibo' table
        create_table = """
                CREATE TABLE IF NOT EXISTS weibo (
                id varchar(20) NOT NULL,
                bid varchar(12) NOT NULL,
                user_id varchar(20),
                screen_name varchar(20),
                text varchar(2000),
                topics varchar(200),
                at_users varchar(200),
                pics varchar(1000),
                video_url varchar(300),
                location varchar(100),
                created_at DATETIME,
                source varchar(30),
                attitudes_count INT,
                comments_count INT,
                reposts_count INT,
                retweet_id varchar(20),
                PRIMARY KEY (id)
                ) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4"""
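
    The statement above defines the 'weibo' table used for storage. As an illustration only, a minimal sketch of executing it with pymysql is shown below; the connection parameters and database name are placeholders, not the project's actual configuration:

import pymysql

# Placeholder connection parameters; replace with your own MySQL settings.
connection = pymysql.connect(host='localhost', user='root', password='123456',
                             charset='utf8mb4')
try:
    with connection.cursor() as cursor:
        cursor.execute('CREATE DATABASE IF NOT EXISTS weibo DEFAULT CHARACTER SET utf8mb4')
        cursor.execute('USE weibo')
        cursor.execute(create_table)  # the CREATE TABLE statement shown above
    connection.commit()
finally:
    connection.close()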