Internship16

Plane Shooter Game and Mobile Web Theory

<!DOCTYPE html>
<html lang="en">
<head>
	<meta charset="UTF-8">
	<title>Document</title>
	<style type="text/css">
		* {
			margin: 0;
			padding: 0;
		}

		#view {
			width: 320px;
			height: 568px;
			background: url(img10-17/bg.png);
			margin: 0 auto;
		}

		#fle_me {
			width: 34px;
			height: 24px;
			background: url(img10-17/me.png);
			position:absolute;
		}
	</style>
</head>
<body>
	<div id="view"></div>
	<script type="text/javascript">
		// 1. The plane
		// 1.1 Create the plane element
		var flyEle = document.createElement('div');
		var view = document.getElementById('view');
		flyEle.id = "fle_me";
		document.body.appendChild(flyEle);
		// 1.2 Make the plane follow the mouse
		// Listen for mousemove on the whole document
		document.onmousemove = function(e) {
			// clientX / clientY: the mouse position in the viewport
			var flyX = e.clientX;
			var flyY = e.clientY;
			// Only move the plane while the cursor stays inside the game view
			var xCheck = flyX > view.offsetLeft && flyX < view.offsetLeft + view.offsetWidth - 34;
			var yCheck = flyY > view.offsetTop && flyY < view.offsetTop + view.offsetHeight - 24;
			if (xCheck && yCheck) {
				flyEle.style.left = flyX + 'px';
				flyEle.style.top = flyY + 'px';
				// Custom flag on the element: the game only runs once the plane has entered the view
				flyEle.flag = true;
			}
		}
		// 2. Bullets
		// 2.1 There can be many bullets at once, each with several properties (top, left),
		// so keep them in one object whose array stores an 'id|top|left' string per bullet.
		var objB = {
			name: 'bullet',
			num: 1,
			arr: [], // ['id|top|left'] -> split('|') gives [id, top, left]
			width: 6,
			height: 14
		}

		createBullet(objB);
		function createBullet(obj) {
			// Spawn a new bullet every 2 seconds while the plane is active
			setInterval(function() {
				if (flyEle.flag) {
					var ele = document.createElement('div');
					ele.id = obj.name + obj.num; // e.g. bullet1
					// Index for the new bullet = number of bullets already stored
					var length = obj.arr.length;
					obj.arr[length] = ele.id + '|';
					obj.num++;
					ele.style.width = obj.width + 'px';
					ele.style.height = obj.height + 'px';
					ele.style.position = 'absolute';
					ele.style.background = 'url(img10-17/b.png)';
					// flyEle.style.top is a string with a unit, so parse it to an int first
					ele.style.top = parseInt(flyEle.style.top) + 'px';
					ele.style.left = parseInt(flyEle.style.left) + 17 + 'px';
					// arr becomes ['id1|top|left', 'id2|top|left', ...]
					obj.arr[length] = obj.arr[length] + ele.style.top + '|' + ele.style.left;
					document.body.appendChild(ele);
				}
			}, 2000)
		}
		// Move every bullet once per tick of the game loop
		function bulletMove() {
			if (flyEle.flag) {
				for (var i = 0; i < objB.arr.length; i++) {
					// Each entry is an 'id|top|left' string
					var newArr = objB.arr[i].split('|');
					var eleB = document.getElementById(newArr[0]);
					newArr[1] = parseInt(newArr[1]) - 9;
					eleB.style.top = newArr[1] + 'px';
					// Write the updated position back into the array
					objB.arr[i] = newArr[0] + '|' + newArr[1] + '|' + newArr[2];
					// Remove bullets that have moved off the top of the screen
					if (parseInt(eleB.style.top) < 0) {
						objB.arr.splice(i, 1);
						eleB.parentNode.removeChild(eleB);
						i--; // stay at the same index after removing an entry
					}
				}
			}
		}

		// 3. Enemy planes
		var objA = {
			name: 'foe',
			num: 1,
			arry: [],
			width: 34,
			height: 24
		}
		createfoe(objA);
		// Spawn an enemy every 2 seconds at a random horizontal position along the top edge
		function createfoe(obj) {
			setInterval(function() {
				if (flyEle.flag) {
					var foe = document.createElement('div');
					foe.id = obj.name + obj.num;
					var length = obj.arry.length;
					obj.arry[length] = foe.id + '|';
					obj.num++;
					foe.style.width = obj.width + 'px';
					foe.style.height = obj.height + 'px';
					foe.style.background = 'url(img10-17/foe.png)';
					foe.style.position = 'absolute';
					var ran = Math.random() * 286; // 320 - 34 keeps the enemy inside the 320px-wide view
					foe.style.top = 0 + 'px';
					foe.style.left = view.offsetLeft + ran + 'px';
					obj.arry[length] = obj.arry[length] + foe.style.top + '|' + foe.style.left;
					document.body.appendChild(foe);
				}
			}, 2000)
		}
		// Move every enemy once per tick of the game loop
		function foeMove() {
			if (flyEle.flag) {
				for (var i = 0; i < objA.arry.length; i++) {
					// ['id', 'top', 'left']
					var newArr = objA.arry[i].split('|');
					var foeB = document.getElementById(newArr[0]);
					newArr[1] = parseInt(newArr[1]) + 5;
					foeB.style.top = newArr[1] + 'px';
					objA.arry[i] = newArr[0] + '|' + newArr[1] + '|' + newArr[2];
					// Remove enemies that reach the bottom of the view
					if (newArr[1] > view.offsetHeight - 24) {
						objA.arry.splice(i, 1);
						foeB.parentNode.removeChild(foeB);
						i--; // stay at the same index after removing an entry
					}
				}
			}
		}
		// Main game loop: move bullets and enemies, then check for collisions
		setInterval(function() {
			bulletMove();
			foeMove();
			// For each enemy, test every bullet against its bounding box
			for (var i = 0; i < objA.arry.length; i++) {
				var newArr = objA.arry[i].split('|');
				var eleF = document.getElementById(newArr[0]);
				var xFS = parseInt(newArr[2]);      // enemy left edge
				var xFE = parseInt(newArr[2]) + 34; // enemy right edge
				var yFS = parseInt(newArr[1]);      // enemy top edge
				var yFE = parseInt(newArr[1]) + 24; // enemy bottom edge
				for (var j = 0; j < objB.arr.length; j++) {
					var newArr1 = objB.arr[j].split('|');
					var eleB = document.getElementById(newArr1[0]);
					var xB = parseInt(newArr1[2]);
					var yB = parseInt(newArr1[1]);
					var xCheck = xB > xFS && xB < xFE;
					var yCheck = yB > yFS && yB < yFE;
					if (xCheck && yCheck) {
						// Hit: remove both the enemy and the bullet
						objA.arry.splice(i, 1);
						document.body.removeChild(eleF);
						objB.arr.splice(j, 1);
						eleB.parentNode.removeChild(eleB);
						i--;   // adjust the outer index after removing this enemy
						break; // this enemy is gone, stop testing more bullets against it
					}
				}
			}
		}, 10)
	</script>
</body>
</html>

Mobile

Mobile web development

Basics

1. The screen

The biggest difference between mobile devices and PCs is the screen, mainly in two respects: screen size and screen resolution. What we usually call the screen size is actually the length of the screen's diagonal, normally measured in inches.

[figure: the screen size is the length of the screen's diagonal]

Resolution, on the other hand, is generally measured in pixels (px) and describes the number of pixels in the screen's horizontal and vertical directions; for example, 1920*1080 means the screen is made up of 1920 pixels in the vertical direction and 1080 pixels in the horizontal direction, as shown in the figure below.

[figure: a screen resolution as a grid of horizontal and vertical pixels]

2. Length units

In web development we can use px (pixels), em, pt (points), in (inches), and cm (centimeters) as length units; px (pixels) is the one we use most often.

These units can be divided into relative length units and absolute length units.

[figure: iPhone 3G/S vs iPhone 4/S screen size and resolution]

As the figure above shows, the iPhone 3G/S and the iPhone 4/S both have a 3.5-inch (in) screen, yet their resolutions are 480*320px and 960*640px respectively. From this we can conclude that the inch is an absolute length unit, while the pixel is a relative length unit (a pixel has no fixed physical length).

3. Pixel density

DPI (Dots Per Inch) comes from the printing industry, where it describes how many ink dots a printer can lay down per inch. Computer displays borrowed the concept, but since the smallest unit of a display is a pixel rather than an ink dot, PPI (Pixels Per Inch) is used to describe how many pixels a screen packs into one inch. Both PPI and DPI are referred to as pixel density; PPI is the more widely used term, while DPI is more common on Android devices.

[figure: pixel density (PPI) illustration]

Retina ("retina display") is Apple's registered name for screens with a relatively high PPI (greater than 320).

Question: with the screen size (in inches) fixed, what is the relationship between PPI and pixel size?

Conclusion: with the screen size fixed, the higher the PPI, the smaller each physical pixel is and the finer the picture; the lower the PPI, the larger each physical pixel is.
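
This conclusion is easy to check numerically: PPI is the number of pixels along the diagonal divided by the diagonal length in inches, and the physical size of one pixel is then roughly 1/PPI inch. A minimal sketch using the iPhone figures quoted above (calcPPI is just an illustrative helper, not part of any API):

// PPI = pixels along the diagonal / diagonal length in inches (illustrative helper)
function calcPPI(widthPx, heightPx, diagonalInch) {
    var diagonalPx = Math.sqrt(widthPx * widthPx + heightPx * heightPx);
    return diagonalPx / diagonalInch;
}

var ppi3GS = calcPPI(320, 480, 3.5); // ≈ 165 with these rounded figures; the official value is 163
var ppi4S = calcPPI(640, 960, 3.5);  // ≈ 330 with these rounded figures; the official value is 326

// The higher the PPI, the smaller each physical pixel (in inches)
console.log('iPhone 3G/S pixel size ≈ ' + (1 / ppi3GS).toFixed(4) + ' in');
console.log('iPhone 4/S pixel size ≈ ' + (1 / ppi4S).toFixed(4) + ' in');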

4. Device independent pixels

As technology has advanced and devices keep being updated, screens with different PPIs now coexist (e.g. the iPhone 3G/S is 163 PPI while the iPhone 4/S is 326 PPI). The pixel is therefore no longer a uniform unit of measure, and an image of the same pixel size is displayed at different physical sizes on devices with different PPIs.

As shown in the figure below, suppose you design a 163*163px blue square: on a 163 PPI screen the square appears exactly 1*1 inch, but on a 326 PPI screen it appears only 0.5*0.5 inch.

[figure: the same 163*163px square on a 163 PPI screen and on a 326 PPI screen]

Users do not care about these details; they simply expect content to look roughly the same size on devices with different PPIs. So we need a new unit that keeps image content at roughly the same apparent size regardless of PPI: the device independent pixel, called pt (Point) on iOS and dip/dp (Device Independent Pixel) on Android.

For example, on the iPhone 3G (163 PPI) 1dp = 1px, while on the iPhone 4 (326 PPI) 1dp = 2px.

[figure: 1dp mapped to 1px on the iPhone 3G and to 2px on the iPhone 4]

It is also easy to see that if we want the iPhone 3G/S and the iPhone 4/S to display content at the same apparent size, the iPhone 4/S version has to be twice as large (they have a 2x, i.e. @2x, relationship): something that is 44*44px on the iPhone 3G/S corresponds to 88*88px on the iPhone 4/S. To achieve this we can simply specify 44*44dp, which maps to 44*44px on the iPhone 3G/S and to 88*88px on the iPhone 4/S, so the user ends up seeing roughly the same size on both devices.
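
The mapping in this example can be written as a one-line conversion. A minimal sketch (the dpToPx helper and the hard-coded ratios are only for illustration):

// Convert device independent pixels (dp/pt) to physical pixels for a given ratio
function dpToPx(dp, ratio) {
    return dp * ratio;
}

// iPhone 3G/S: ratio 1; iPhone 4/S: ratio 2 (the @2x relationship)
console.log(dpToPx(44, 1)); // 44 -> a 44*44dp element occupies 44*44 physical px on the iPhone 3G/S
console.log(dpToPx(44, 2)); // 88 -> the same 44*44dp element occupies 88*88 physical px on the iPhone 4/S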

From this example it is clear that dp and px have a fixed correspondence (ratio) that is determined and handled by the operating system; its purpose is to keep displayed image sizes consistent across screens with different PPIs. This ratio can be read via window.devicePixelRatio.

<head>
    <meta charset="UTF-8">
    <title>Get the ratio of physical pixels to device independent pixels</title>
</head>
<body>
<script>
    // The relationship between physical pixels and device independent pixels
    alert(window.devicePixelRatio);
</script>
</body>

As noted above, dp (or pt) and px are not always an exact integer multiple of each other; rather, window.devicePixelRatio ≈ physical pixels / device independent pixels. We do not need to worry about the details, because the operating system handles them automatically (ensuring that 1dp looks roughly the same size on different devices).
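
In front-end code this ratio is mainly useful for deciding how dense an image asset needs to be. A minimal sketch of choosing an image variant based on window.devicePixelRatio (the pickAsset helper and the file names are made up for this example):

// Pick an image asset that roughly matches the screen's pixel density
function pickAsset(baseName) {
    var ratio = window.devicePixelRatio || 1; // fall back to 1 on very old browsers
    if (ratio >= 3) {
        return baseName + '@3x.png';
    } else if (ratio >= 2) {
        return baseName + '@2x.png';
    }
    return baseName + '.png';
}

var img = document.createElement('img');
img.src = pickAsset('logo'); // e.g. 'logo@2x.png' on an iPhone 4/S class screen
document.body.appendChild(img);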

5. Pixels

1. Physical pixels are the smallest units a screen uses to render an image. They are a physical property of the screen, cannot be changed, and their count determines the quality of the rendered image. Everything discussed above refers to physical pixels.

// Get the screen's physical pixel size
var screenWidth = window.screen.width;
var screenHeight = window.screen.height;

console.log('Screen width: ' + screenWidth);
console.log('Screen height: ' + screenHeight);

2. CSS pixels (device independent pixels in the CSS sense) are the units used when laying out a page with CSS. By default (on PC) one CSS pixel equals one physical pixel, but the ratio can be changed by zooming.

<head>
    <meta charset="UTF-8">
    <title>CSS pixels</title>
    <style>
        body {
            padding: 0;
            margin: 0;
            background-color: #F7F7F7;
            /*height: 1400px;*/
        }

        .box {
            /* change this to your own screen's pixel width */
            width: 1152px;
            height: 120px;
            background-color: red;
        }
    </style>
</head>
<body>
<div class="box"></div>
</body>

What we need to understand is the relationship between physical pixels and CSS pixels: one physical pixel does not always equal one CSS pixel. By adjusting the browser zoom level, three cases are possible, as shown below.

[figure: the three cases of CSS pixels vs physical pixels under browser zoom]
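
On a desktop browser these three cases are easy to observe, because window.devicePixelRatio reports how many physical pixels one CSS pixel currently covers, and it changes with the page zoom level. A minimal sketch, assuming a standard-density display whose default ratio is 1:

// Log how many physical pixels one CSS pixel covers at the current zoom level
function logPixelRelation() {
    var ratio = window.devicePixelRatio;
    if (ratio > 1) {
        console.log('Zoomed in: 1 CSS pixel covers more than 1 physical pixel (ratio = ' + ratio + ')');
    } else if (ratio < 1) {
        console.log('Zoomed out: 1 CSS pixel covers less than 1 physical pixel (ratio = ' + ratio + ')');
    } else {
        console.log('Default zoom: 1 CSS pixel = 1 physical pixel');
    }
}

logPixelRelation();
window.addEventListener('resize', logPixelRelation); // zooming triggers a resize in most desktop browsers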

Debugging

Simulated (emulator) debugging
Real-device debugging