【将查询到的数据插入数据表】
-- Insert the distinct VINs of Feb-2018 non-zero sales into temp_table.
-- Fixes: DISTINCT is a keyword, not a function, so distinct(vin) was
-- misleading; the redundant (SELECT ...) wrapper is gone.
-- NOTE(review): substring(pay_time,1,7)='2018-02' defeats any index on
-- pay_time; if pay_time is a 'YYYY-MM-DD ...' string, a sargable range
-- (pay_time >= '2018-02' AND pay_time < '2018-03') is equivalent -- confirm.
INSERT INTO temp_table (vin)
SELECT DISTINCT vin
FROM aaa
WHERE sale <> 0
  AND substring(pay_time, 1, 7) = '2018-02';
【给网页上POI点做准备】
-- For each charging station (c), concatenate coordinates of all scenic spots
-- (ci) inside a ~0.005-degree bounding box, to plot POI points on a web map.
-- Fixes vs. original: ANSI INNER JOIN instead of a comma join; removed the
-- duplicated c.poi_id column; ci.id was indeterminate under GROUP BY c.id
-- (one arbitrary row per group), so the matched scenic ids are now aggregated.
-- Assumes c.id is the primary key, so the bare c.* columns are functionally
-- dependent and legal under ONLY_FULL_GROUP_BY -- confirm.
SELECT c.id AS '充电站id',
       GROUP_CONCAT(ci.id) AS '景点id',
       c.poi_id,
       c.name,
       c.location_lon,
       c.location_lat,
       REPLACE(GROUP_CONCAT(CONCAT(ci.location_lon, ',', ci.location_lat, ';')), ';,', ';')
FROM poi_shanghai_charging_loc AS c
INNER JOIN poi_shanghai_scenic_loc AS ci
    ON  c.location_lon > ci.location_lon - 0.005
    AND c.location_lon < ci.location_lon + 0.005
    AND c.location_lat > ci.location_lat - 0.005
    AND c.location_lat < ci.location_lat + 0.005
GROUP BY c.id;
【POI点加上信息】
-- For each scenic spot (ci), list the charging stations (c) inside a
-- ~0.01-degree bounding box, with names and coordinates concatenated.
-- Fixes vs. original: ANSI INNER JOIN instead of a comma join; dropped the
-- duplicated c.poi_id column; c.id / c.poi_id / c.name were indeterminate
-- under GROUP BY ci.id and are now explicitly aggregated.
-- Assumes ci.id is the primary key so bare ci.* columns are legal -- confirm.
SELECT ci.id AS '景点id',
       GROUP_CONCAT(c.id) AS '充电站id',
       GROUP_CONCAT(c.poi_id) AS charger_poi_ids,
       ci.name,
       CONCAT(ci.name, ',', GROUP_CONCAT(c.name)),
       CONCAT(CONCAT(ci.location_lon, ',', ci.location_lat), ';',
              REPLACE(GROUP_CONCAT(CONCAT(c.location_lon, ',', c.location_lat, ';')), ';,', ';'))
FROM poi_shanghai_charging_loc AS c
INNER JOIN poi_shanghai_scenic_loc AS ci
    ON  c.location_lon > ci.location_lon - 0.01
    AND c.location_lon < ci.location_lon + 0.01
    AND c.location_lat > ci.location_lat - 0.01
    AND c.location_lat < ci.location_lat + 0.01
GROUP BY ci.id;
精简↓
-- Slimmed version: one row per ci (header says 图书馆/library, table alias
-- points at scenic spots -- confirm which dataset is intended) with all
-- nearby charger names and coordinates concatenated.
-- Fix vs. original: explicit ANSI INNER JOIN replaces the comma join; the
-- bounding-box predicate moves into ON where it belongs. Result is identical.
SELECT ci.id AS '图书馆id',
       ci.poi_id,
       ci.name,
       CONCAT(ci.name, ',', GROUP_CONCAT(c.name)),
       CONCAT(CONCAT(ci.location_lon, ',', ci.location_lat), ';',
              REPLACE(GROUP_CONCAT(CONCAT(c.location_lon, ',', c.location_lat, ';')), ';,', ';'))
FROM poi_shanghai_charging_loc AS c
INNER JOIN poi_shanghai_scenic_loc AS ci
    ON  c.location_lon > ci.location_lon - 0.01
    AND c.location_lon < ci.location_lon + 0.01
    AND c.location_lat > ci.location_lat - 0.01
    AND c.location_lat < ci.location_lat + 0.01
GROUP BY ci.id;
-- Table for shop listings -- column names suggest data scraped from
-- dianping.com (TODO confirm).
CREATE TABLE `NewTable` (
`id` int(11) NOT NULL AUTO_INCREMENT ,
-- External id of the shop on the source site.
`dianping_id` int(11) NULL DEFAULT NULL ,
`shop_name` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL ,
-- Rating level, kept as text.
`shop_lv` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL ,
-- NOTE(review): average price stored as varchar -- numeric comparisons will
-- rely on implicit coercion; consider DECIMAL if it is always numeric.
`shop_ave_price` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL ,
`shop_comment_count` int(11) NULL DEFAULT NULL ,
`shop_type` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL ,
`shop_area` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL ,
`shop_addr` varchar(255) CHARACTER SET utf8 COLLATE utf8_general_ci NULL DEFAULT NULL ,
-- Sub-scores: taste / environment / service.
`taste` double NULL DEFAULT NULL ,
`env` double NULL DEFAULT NULL ,
-- NOTE(review): UNSIGNED on service only is inconsistent with taste/env --
-- confirm whether it is intentional.
`service` double UNSIGNED NULL DEFAULT NULL ,
PRIMARY KEY (`id`)
)
-- NOTE(review): MyISAM has no transactions or foreign keys; InnoDB is the
-- usual choice unless there is a specific reason for MyISAM here.
ENGINE=MyISAM
DEFAULT CHARACTER SET=utf8 COLLATE=utf8_general_ci
AUTO_INCREMENT=1
CHECKSUM=0
ROW_FORMAT=DYNAMIC
DELAY_KEY_WRITE=0
;
【查找有重复值的项】
-- Find charge_id values that occur more than once.
-- Fix: HAVING now repeats COUNT(*) instead of referencing the select alias
-- `count` -- alias references in HAVING are a MySQL-only extension, and
-- `count` shadows the function name. Output column name is unchanged.
SELECT charge_id, COUNT(*) AS count
FROM chargelife_info
GROUP BY charge_id
HAVING COUNT(*) > 1;
【搜索结果去重】
-- Number of distinct charge_id values in chargelife_info.
SELECT COUNT(DISTINCT charge_id)
FROM chargelife_info;
【查看默认的编码格式】
-- Show character-set related server variables.
-- Fix: single-quote the string literal -- "%char%" is treated as an
-- identifier (and fails) when sql_mode includes ANSI_QUOTES.
SHOW VARIABLES LIKE '%char%';
【复制表】
mysql中用命令行复制表结构的方法主要有一下几种:
1.只复制表结构到新表
CREATE TABLE 新表 SELECT * FROM 旧表 WHERE 1=2;
或
CREATE TABLE 新表 LIKE 旧表;
注意上面两种方式:前一种方式不会把旧表的主键和自增属性复制过去,而后一种方式会把旧表的所有字段类型(包括键和自增属性)都复制到新表。
2.复制表结构及数据到新表
CREATE TABLE 新表 SELECT * FROM 旧表;
3.复制旧表的数据到新表(假设两个表结构一样)
INSERT INTO 新表 SELECT * FROM 旧表;
4.复制旧表的数据到新表(假设两个表结构不一样)
INSERT INTO 新表(字段1,字段2,.......) SELECT 字段1,字段2,...... FROM 旧表;
【MYSQL查询结果增加一列】
-- Append a constant column `loginname` to every row of b_telnumber.
-- Fix: explicit AS for both aliases. Note '0' is a string literal, not the
-- number 0 -- use a bare 0 if a numeric column is wanted.
SELECT b.*, '0' AS loginname
FROM b_telnumber AS b;
在b_telnumber这张表的查询结果中新增一列loginname,并把这一列的所有值都设置为字符串'0'。
[修改数据库默认编码]
vi /etc/my.cnf
在[mysqld]中添加 character-set-server = utf8
重启 service mysqld restart
登录mysql 用status查看修改情况
【添加索引】
-- Create a prefix index on query_result.bill_date.
-- NOTE(review): bill_date(255) indexes only the first 255 characters -- if
-- the column is VARCHAR(255) the prefix covers the whole value and is
-- pointless syntax; if it is a DATE the prefix is invalid. Confirm the
-- column type; dates stored as strings also defeat range scans.
CREATE INDEX index_bill_date ON query_result(bill_date(255));
-- List all indexes on a table.
SHOW INDEX FROM table_name;
【date_format】
-- Populate change_end_time with change_date reformatted as 'YYYYMMDDHHMMSS'
-- (first 19 chars of change_date parsed as DATETIME, then re-formatted).
-- Fix: the original joined the table to a derived copy of itself on msisdn;
-- if msisdn is not unique, that join multiplies rows and the value written
-- is indeterminate. Each row only needs its own change_date, so a direct
-- single-table UPDATE is equivalent and safe.
-- Intentionally updates every row (the original had no row filter either).
UPDATE starttime_select
SET change_end_time = DATE_FORMAT(
        CAST(SUBSTRING(change_date, 1, 19) AS datetime),
        '%Y%m%d%H%i%S');
[按周查询数据]
-- Weekly order count and payment total (bucketed by 'YYYY-WW' of pt)
-- for one category / order status.
SELECT DATE_FORMAT(pt, '%Y-%u') AS a,
       COUNT(*),
       SUM(actual_payment)
FROM aaa
WHERE cate_name = 'bb'
  AND order_status = 'cc'
GROUP BY a;
【将b表查询结果更新到a表(用id查询)】
-- Fill order_count (only where it is still NULL) with the per-id number of
-- completed ('已完成') orders counted from tpt_package_order.
-- The NULL filter lives in WHERE rather than ON; for an inner join the
-- updated row set is identical.
UPDATE tpt_package_order_last AS a
INNER JOIN (
    SELECT id,
           COUNT(*) AS count_all
    FROM tpt_package_order
    WHERE order_zt = '已完成'
    GROUP BY id
) AS b ON a.id = b.id
SET a.order_count = b.count_all
WHERE a.order_count IS NULL;
【用一张表里的字段为条件结合另一张表查询数据】
#假设我有两张表:一张表tpt_package_order有user_id以及购买若干订单的记录,而另一张表user_id_buy_try有user_id和一个开始时间pay_time_start和一个结束时间pay_time_end,求在开始和结束时间段内用户购买订单的总额,如下:
-- Per-user total spend on completed ('已完成') orders whose pay_time falls
-- inside that user's [pay_time_start, pay_time_end] window from
-- user_id_buy_try.
-- Fix: the original wrapped both base tables in derived tables that added
-- nothing; joining the tables directly is equivalent and clearer.
-- NOTE: BETWEEN is inclusive on both ends -- confirm boundary orders should
-- be counted; use >= / < for a half-open window otherwise.
SELECT w.user_id, SUM(o.amount)
FROM user_id_buy_try AS w
INNER JOIN tpt_package_order AS o
    ON  o.user_id = w.user_id
    AND o.pay_time BETWEEN w.pay_time_start AND w.pay_time_end
WHERE o.order_zt = '已完成'
GROUP BY w.user_id;
【显示表的pt】
-- List a table's partitions.
-- NOTE(review): SHOW PARTITIONS is Hive/Spark/MaxCompute syntax, not MySQL;
-- on MySQL query information_schema.partitions instead.
show partitions table_name;
【行转列(按月透视)】
+-----------+--------------------+---------+---------+
| cate_name | 2019-06 | 2019-07 | 2019-08 |
+-----------+--------------------+---------+---------+
|aa | 26574.9 | 0 | 0 |
| bb | 46631.8 | 0 | 1.5 |
| cc | 4862.639999999998 | 74.67 | 0 |
+-----------+--------------------+---------+---------+
-- Pivot rows to columns: one row per cate_name with monthly payment totals
-- for 2019-06..2019-08 as separate columns.
-- Fix: rewritten as single-pass conditional aggregation. The original
-- two-level CASE + MAX form needed an extra grouping pass, and MAX over
-- {sum, 0, ...} silently turned a negative monthly total into 0; SUM(CASE)
-- preserves it. Output columns are unchanged.
SELECT cate_name,
       SUM(CASE WHEN SUBSTRING(pt, 1, 6) = '201906' THEN actual_payment ELSE 0 END) AS `2019-06`,
       SUM(CASE WHEN SUBSTRING(pt, 1, 6) = '201907' THEN actual_payment ELSE 0 END) AS `2019-07`,
       SUM(CASE WHEN SUBSTRING(pt, 1, 6) = '201908' THEN actual_payment ELSE 0 END) AS `2019-08`
FROM tt
GROUP BY cate_name;
【MySQL sql_mode=only_full_group_by】
查看sql_mode
select @@sql_mode;
查询出来的值为:
ONLY_FULL_GROUP_BY,STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION
在查出结果中可以看到里面包含了ONLY_FULL_GROUP_BY,因此我们要将ONLY_FULL_GROUP_BY从中去掉,重新设置值。
set @@global.sql_mode ='STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION';
上面是改变了全局sql_mode,对于新建的数据库有效。对于已存在的数据库,则需要在对应的数据下执行
set sql_mode ='STRICT_TRANS_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION';
【Mysql 5.7 JSON_EXTRACT(多层json处理)】
json_response
{
"trade": {
"num": 1,
"order": [
{
"cid": 111
}
]
}
}
-- Extract the first sub-order's cid from the nested JSON column
-- (MySQL 5.7+ JSON path: trade -> order[0] -> cid).
-- NOTE: JSON_EXTRACT returns a JSON value; wrap in JSON_UNQUOTE (or use ->>)
-- when the target is a JSON string and bare text is needed.
SELECT JSON_EXTRACT(json_response,'$.trade.order[0].cid') as a
FROM aaa
【对字段做处理】
-- Count records per school and grade band ('优秀'/'良好'/'及格') for rows
-- whose add_time year is 2019.
-- Fix: single-element IN lists replaced with '='. Behavior is unchanged;
-- GROUP BY sc resolves to the CASE alias (MySQL).
-- NOTE(review): substring(add_time,1,4) defeats any index on add_time --
-- prefer a range predicate if add_time is a DATE/DATETIME.
SELECT school,
       CASE
           WHEN sc IN ('1', '2') THEN '优秀'
           WHEN sc = '3' THEN '良好'
           ELSE '及格'
       END AS sc,
       COUNT(*)
FROM aaa
WHERE SUBSTRING(add_time, 1, 4) = '2019'
GROUP BY school, sc
【毫秒转时间】
-- Millisecond epoch to timestamp: keep the first 10 digits, dropping the
-- trailing 3 millisecond digits; `*1` coerces a string ctime to a number.
from_unixtime(SUBSTRING(a.ctime*1,1,10)) AS create_ts -- keep first 10 digits -- drops the last 3 (millisecond) digits
【查询表的分区数以及条数等信息--mysql】
-- Partition name, expression, description and row count for table `t`
-- in the current schema (MySQL information_schema).
SELECT partition_name        AS part,
       partition_expression  AS expr,
       partition_description AS descr,
       table_rows
FROM information_schema.partitions
WHERE table_schema = SCHEMA()
  AND table_name = 't';
【按id,只取最新的一条数据(修改时间 modified。按需可能是modified+某一个字段结合来取最新,用concat(id,modified)】
-- Keep only the newest row per id, ranked by `modified` descending.
-- When one column cannot break ties, order by a composite instead
-- (e.g. modified plus another column, or concat(id, modified)).
SELECT ranked.*
FROM (
    SELECT *,
           ROW_NUMBER() OVER (PARTITION BY id ORDER BY modified DESC) AS rn
    FROM table_name
    WHERE pt >= 20180101
) AS ranked
WHERE ranked.rn = 1
【取数后放到一个字段内,做数组】
-- Gather all of a name's remark values into one sorted array column
-- (Hive/Spark/MaxCompute). NOTE: collect_set de-duplicates; use
-- collect_list to keep duplicates.
SELECT name
,SORT_ARRAY( collect_set( remark ) ) AS remark_all
FROM table
GROUP BY name -- pull all of one student's scores into a single array
【一个json里有多个嵌套,拿出来做子表,例子:一个父订单json里嵌套了多个子订单】
-- Split a JSON-array column (`actions`, shaped like "[{...},{...}]") of
-- child orders into one row per element (Hive/MaxCompute). Each element is
-- tagged with the parent row's row_number as "traceid" before the split so
-- the exploded rows can be joined back to their parent.
SELECT b.row_number as traceid
FROM (
SELECT explode(
split(
regexp_replace(
regexp_replace(
SUBSTRING(actions,1,length(actions)-1) -- drop the trailing ']'
,'\\[{'
,concat('{\"traceid\":\"',row_number,'\",') -- inject traceid for the later join
)
,'},{'
,concat('}split_sign{\"traceid\":\"',row_number,'\",') -- inject traceid into every element and mark split points
)
,'split_sign'
)
) AS DATA
FROM bmcdm.dwd_bm_log_kanas_scene_trigger_source_di
WHERE pt = ${bizdate}
) a
LATERAL VIEW json_tuple(a.DATA,'traceid') b AS row_number -- 'traceid' is the JSON key to pull; output column name is arbitrary
;
--actions 数据样例"[{*****},{******}]"
-- Explode the comma-separated operator_ids column into one row per
-- operator_id (Hive/Spark LATERAL VIEW + explode + split).
-- NOTE(review): CSV stored in a column is an antipattern -- consider
-- normalizing upstream into a junction table.
SELECT b.operator_id
,operator_ids
FROM table_name
LATERAL VIEW explode(SPLIT(operator_ids, ',')) b AS operator_id
WHERE pt = '${bizdate}'
;
-- Per-brand device counts for one activity id, with the brand's distinct
-- device names concatenated '|'-separated (Hive/MaxCompute-style SQL;
-- table and column names are anonymized placeholders).
SELECT
a.brand,
COUNT(1) brand_count,
-- De-duplicated, '|'-joined device list per brand.
concat_ws('|', collect_set(device)) device_brands,
-- NOTE(review): sysdate(-1) appears to be a platform UDF returning
-- yesterday's date partition -- not standard MySQL; confirm.
sysdate(- 1) dt
FROM
(
SELECT
-- Map-value access: the 'brand' entry of the xxx_xxx map column.
xxx_xxx [ 'brand' ] brand,
xxx2 device
FROM
xx.xxx_log
WHERE
dt = sysdate(- 1)
AND xx_id = 'xx2018_5131032'
) a
GROUP BY
a.brand
【查询一批数据,然后按照不同条件插入不同的表】
使用多路输出(MULTI INSERT)
-- Multi-insert: scan sale_detail once and write each branch to its own
-- target partition (Hive/MaxCompute MULTI INSERT).
-- Fix: the original wrote BOTH branches into the identical partition
-- (sale_date='2010', region='china'), which defeats the purpose of a
-- multi-insert and is rejected as a duplicate overwrite target on
-- MaxCompute; the second branch now targets a different region, matching
-- the canonical documentation example.
FROM sale_detail
INSERT OVERWRITE TABLE sale_detail_multi PARTITION (sale_date='2010', region='china')
SELECT shop_name, customer_id, total_price
INSERT OVERWRITE TABLE sale_detail_multi PARTITION (sale_date='2010', region='shanghai')
SELECT shop_name, customer_id, total_price;