import wordcloud
import jieba
import numpy as np
from PIL import Image  # scipy.misc.imread was removed in SciPy 1.2; load the mask with PIL + NumPy instead
# Mask image that gives the word cloud its custom shape (a map of China)
mask = np.array(Image.open("C:\\Users\\Administrator\\Desktop\\python-lianxi\\chinamap.jpg"))
# Read the source text (the speech marking the 40th anniversary of reform and opening-up)
with open("C:\\Users\\Administrator\\Desktop\\python-lianxi\\改革开放四十周年讲话.txt", "r", encoding="utf-8") as f:
    t = f.read()
ls = jieba.lcut(t)   # first segment the Chinese text into words with jieba
txt = " ".join(ls)   # join the words with spaces so WordCloud can tokenize them
w = wordcloud.WordCloud(
    font_path="C:\\Users\\Administrator\\Desktop\\python-lianxi\\msyh.ttc",  # a Chinese-capable font is needed to render CJK characters
    width=1000, height=700, background_color="white",
    mask=mask)
w.generate(txt)
w.to_file("C:\\Users\\Administrator\\Desktop\\chinamap.png")
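# Optional preview step (a minimal sketch, assuming matplotlib is installed; it is
# not required for saving the PNG above). A WordCloud object can be passed directly
# to imshow to display the rendered cloud.
import matplotlib.pyplot as plt
plt.imshow(w, interpolation="bilinear")
plt.axis("off")
plt.show()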